api,server: purge expunged resources (#8999)

This PR introduces the functionality to purge removed DB entries for CloudStack entities (currently only VirtualMachine). There are three mechanisms for purging removed resources:

    Background task - CloudStack runs a background task at a defined interval. Other parameters for this task can be controlled with new global settings.
    API - A new admin-only API, purgeExpungedResources. It accepts the following parameters: resourcetype, batchsize, startdate, enddate. Currently, the API is not supported in the UI.
    Service offering config - Service offerings can be created with the purgeresources parameter, which allows purging resources immediately on expunge. Example invocations are sketched after this list.
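
For illustration only, the new API and the offering parameter could be exercised with CloudMonkey roughly as below; the command form assumes CloudMonkey's usual lowercase verb/noun mapping of API names, and the parameter values are placeholders rather than output from this PR:

    # hypothetical: purge expunged VirtualMachine DB records removed within a date range (admin only)
    cmk purge expungedresources resourcetype=VirtualMachine batchsize=50 startdate=2024-01-01 enddate=2024-05-31

    # hypothetical: offering whose instances have their DB records purged immediately on expunge
    cmk create serviceoffering name=purge-on-expunge displaytext="Purge on expunge" cpunumber=1 cpuspeed=500 memory=512 purgeresources=true

The purge API returns the count of purged resources (resourcecount) in its response.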

The following new global settings have been added (an example of updating them via the API follows the list):

    expunged.resources.purge.enabled: Default: false. Whether to run a background task to purge the expunged resources.
    expunged.resources.purge.resources: Default: (empty). A comma-separated list of resource types that will be considered by the background task to purge the expunged resources. Currently only VirtualMachine is supported. An empty value results in all resource types being considered for purging.
    expunged.resources.purge.interval: Default: 86400. Interval (in seconds) for the background task to purge the expunged resources.
    expunged.resources.purge.delay: Default: 300. Initial delay (in seconds) before the background task to purge the expunged resources starts.
    expunged.resources.purge.batch.size: Default: 50. Batch size to be used during expunged resources purging.
    expunged.resources.purge.start.time: Default: (empty). Start time to be used by the background task to purge the expunged resources. Use format yyyy-MM-dd or yyyy-MM-dd HH:mm:ss.
    expunged.resources.purge.keep.past.days: Default: 30. The number of days in the past, counted from the execution time of the background task, for which expunged resources must not be purged. Set the value to zero to allow purging expunged resources right up to the task's execution time.
    expunged.resource.purge.job.delay: Default: 180. Delay (in seconds) before purging an expunged resource when purging is initiated by the offering configuration. The minimum value is 180 seconds; if a lower value is set, the minimum is used.
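
As a sketch of how the background task could be enabled and tuned (using the standard updateConfiguration API; the chosen values are examples only):

    # enable the background purge task and restrict it to VirtualMachine records
    cmk update configuration name=expunged.resources.purge.enabled value=true
    cmk update configuration name=expunged.resources.purge.resources value=VirtualMachine
    # purge once a day, but keep records expunged within the last 30 days
    cmk update configuration name=expunged.resources.purge.interval value=86400
    cmk update configuration name=expunged.resources.purge.keep.past.days value=30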

Documentation PR: apache/cloudstack-documentation#397

Signed-off-by: Abhishek Kumar <abhishek.mrt22@gmail.com>
Co-authored-by: Wei Zhou <weizhou@apache.org>
Co-authored-by: Suresh Kumar Anaparti <sureshkumar.anaparti@gmail.com>
Abhishek Kumar 2024-06-20 11:34:44 +05:30 committed by GitHub
parent 373f017002
commit 3e6900ac1a
100 changed files with 4054 additions and 59 deletions

View File

@ -87,7 +87,8 @@ jobs:
smoke/test_multipleips_per_nic
smoke/test_nested_virtualization
smoke/test_set_sourcenat
smoke/test_webhook_lifecycle",
smoke/test_webhook_lifecycle
smoke/test_purge_expunged_vms",
"smoke/test_network
smoke/test_network_acl
smoke/test_network_ipv6

View File

@ -721,6 +721,8 @@ public class EventTypes {
// SystemVM
public static final String EVENT_LIVE_PATCH_SYSTEMVM = "LIVE.PATCH.SYSTEM.VM";
//Purge resources
public static final String EVENT_PURGE_EXPUNGED_RESOURCES = "PURGE.EXPUNGED.RESOURCES";
// OBJECT STORE
public static final String EVENT_OBJECT_STORE_CREATE = "OBJECT.STORE.CREATE";

View File

@ -48,4 +48,7 @@ public interface LoadBalancingServiceProvider extends NetworkElement, IpDeployin
List<LoadBalancerTO> updateHealthChecks(Network network, List<LoadBalancingRule> lbrules);
boolean handlesOnlyRulesInTransitionState();
default void expungeLbVmRefs(List<Long> vmIds, Long batchSize) {
}
}

View File

@ -33,6 +33,9 @@ public interface ServiceOffering extends InfrastructureEntity, InternalIdentity,
static final String internalLbVmDefaultOffUniqueName = "Cloud.Com-InternalLBVm";
// leaving cloud.com references as these are identifyers and no real world addresses (check against DB)
static final String PURGE_DB_ENTITIES_KEY = "purge.db.entities";
enum State {
Inactive, Active,
}

View File

@ -388,6 +388,7 @@ public class ApiConstants {
public static final String PUBLIC_START_PORT = "publicport";
public static final String PUBLIC_END_PORT = "publicendport";
public static final String PUBLIC_ZONE = "publiczone";
public static final String PURGE_RESOURCES = "purgeresources";
public static final String RECEIVED_BYTES = "receivedbytes";
public static final String RECONNECT = "reconnect";
public static final String RECOVER = "recover";
@ -909,6 +910,7 @@ public class ApiConstants {
public static final String AUTOSCALE_VMGROUP_NAME = "autoscalevmgroupname";
public static final String BAREMETAL_DISCOVER_NAME = "baremetaldiscovername";
public static final String BAREMETAL_RCT_URL = "baremetalrcturl";
public static final String BATCH_SIZE = "batchsize";
public static final String UCS_DN = "ucsdn";
public static final String GSLB_PROVIDER = "gslbprovider";
public static final String EXCLUSIVE_GSLB_PROVIDER = "isexclusivegslbprovider";

View File

@ -246,6 +246,12 @@ public class CreateServiceOfferingCmd extends BaseCmd {
@Parameter(name = ApiConstants.ENCRYPT_ROOT, type = CommandType.BOOLEAN, description = "VMs using this offering require root volume encryption", since="4.18")
private Boolean encryptRoot;
@Parameter(name = ApiConstants.PURGE_RESOURCES, type = CommandType.BOOLEAN,
description = "Whether to cleanup instance and its associated resource from database upon expunge of the instance",
since="4.20")
private Boolean purgeResources;
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
@ -273,7 +279,7 @@ public class CreateServiceOfferingCmd extends BaseCmd {
public String getServiceOfferingName() {
if (StringUtils.isEmpty(serviceOfferingName)) {
throw new InvalidParameterValueException("Failed to create service offering because offering name has not been spified.");
throw new InvalidParameterValueException("Failed to create service offering because offering name has not been specified.");
}
return serviceOfferingName;
}
@ -481,6 +487,10 @@ public class CreateServiceOfferingCmd extends BaseCmd {
return false;
}
public boolean isPurgeResources() {
return Boolean.TRUE.equals(purgeResources);
}
/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////

View File

@ -89,6 +89,11 @@ public class UpdateServiceOfferingCmd extends BaseCmd {
description = "state of the service offering")
private String serviceOfferingState;
@Parameter(name = ApiConstants.PURGE_RESOURCES, type = CommandType.BOOLEAN,
description = "Whether to cleanup VM and its associated resource upon expunge",
since="4.20")
private Boolean purgeResources;
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
/////////////////////////////////////////////////////
@ -185,6 +190,10 @@ public class UpdateServiceOfferingCmd extends BaseCmd {
return state;
}
public boolean isPurgeResources() {
return Boolean.TRUE.equals(purgeResources);
}
/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////

View File

@ -0,0 +1,131 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.admin.resource;
import java.util.Date;
import javax.inject.Inject;
import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiErrorCode;
import org.apache.cloudstack.api.BaseAsyncCmd;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ResponseObject;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.PurgeExpungedResourcesResponse;
import org.apache.cloudstack.api.response.SuccessResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.resource.ResourceCleanupService;
import com.cloud.event.EventTypes;
@APICommand(name = "purgeExpungedResources",
description = "Purge expunged resources",
responseObject = SuccessResponse.class,
responseView = ResponseObject.ResponseView.Full,
requestHasSensitiveInfo = false,
responseHasSensitiveInfo = false,
authorized = {RoleType.Admin},
since = "4.20")
public class PurgeExpungedResourcesCmd extends BaseAsyncCmd {
@Inject
ResourceCleanupService resourceCleanupService;
/////////////////////////////////////////////////////
//////////////// API parameters /////////////////////
/////////////////////////////////////////////////////
@Parameter(name = ApiConstants.RESOURCE_TYPE, type = BaseCmd.CommandType.STRING,
description = "The type of the resource which need to be purged. Supported types: " +
"VirtualMachine")
private String resourceType;
@Parameter(name = ApiConstants.BATCH_SIZE, type = CommandType.LONG,
description = "The size of batch used during purging")
private Long batchSize;
@Parameter(name = ApiConstants.START_DATE,
type = CommandType.DATE,
description = "The start date range of the expunged resources used for purging " +
"(use format \"yyyy-MM-dd\" or \"yyyy-MM-dd HH:mm:ss\")")
private Date startDate;
@Parameter(name = ApiConstants.END_DATE,
type = CommandType.DATE,
description = "The end date range of the expunged resources used for purging " +
"(use format \"yyyy-MM-dd\" or \"yyyy-MM-dd HH:mm:ss\")")
private Date endDate;
/////////////////////////////////////////////////////
/////////////////// Accessors ///////////////////////
/////////////////////////////////////////////////////
public String getResourceType() {
return resourceType;
}
public Long getBatchSize() {
return batchSize;
}
public Date getStartDate() {
return startDate;
}
public Date getEndDate() {
return endDate;
}
@Override
public long getEntityOwnerId() {
return CallContext.current().getCallingAccount().getId();
}
@Override
public String getEventType() {
return EventTypes.EVENT_PURGE_EXPUNGED_RESOURCES;
}
@Override
public String getEventDescription() {
return "Purging expunged resources";
}
/////////////////////////////////////////////////////
/////////////// API Implementation///////////////////
/////////////////////////////////////////////////////
@Override
public void execute() {
try {
long result = resourceCleanupService.purgeExpungedResources(this);
PurgeExpungedResourcesResponse response = new PurgeExpungedResourcesResponse();
response.setResourceCount(result);
response.setObjectName(getCommandName().toLowerCase());
setResponseObject(response);
} catch (Exception e) {
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, e.getLocalizedMessage());
}
}
}

View File

@ -0,0 +1,39 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.response;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.BaseResponse;
import com.cloud.serializer.Param;
import com.google.gson.annotations.SerializedName;
public class PurgeExpungedResourcesResponse extends BaseResponse {
@SerializedName(ApiConstants.RESOURCE_COUNT)
@Param(description = "The count of the purged expunged resources")
private Long resourceCount;
public Long getResourceCount() {
return resourceCount;
}
public void setResourceCount(Long resourceCount) {
this.resourceCount = resourceCount;
}
}

View File

@ -234,6 +234,10 @@ public class ServiceOfferingResponse extends BaseResponseWithAnnotations {
@Param(description = "true if virtual machine root disk will be encrypted on storage", since = "4.18")
private Boolean encryptRoot;
@SerializedName(ApiConstants.PURGE_RESOURCES)
@Param(description = "Whether to cleanup VM and its associated resource upon expunge", since = "4.20")
private Boolean purgeResources;
public ServiceOfferingResponse() {
}
@ -555,4 +559,8 @@ public class ServiceOfferingResponse extends BaseResponseWithAnnotations {
}
public void setEncryptRoot(Boolean encrypt) { this.encryptRoot = encrypt; }
public void setPurgeResources(Boolean purgeResources) {
this.purgeResources = purgeResources;
}
}

View File

@ -0,0 +1,74 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.resource;
import org.apache.cloudstack.api.command.admin.resource.PurgeExpungedResourcesCmd;
import org.apache.cloudstack.framework.config.ConfigKey;
import com.cloud.vm.VirtualMachine;
public interface ResourceCleanupService {
int MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS = 3 * 60;
ConfigKey<Boolean> ExpungedResourcePurgeEnabled = new ConfigKey<>("Advanced", Boolean.class,
"expunged.resources.purge.enabled", "false",
"Whether to run a background task to purge the DB records of the expunged resources",
false, ConfigKey.Scope.Global);
ConfigKey<String> ExpungedResourcePurgeResources = new ConfigKey<>("Advanced", String.class,
"expunged.resources.purge.resources", "",
"A comma-separated list of resource types that will be considered by the background task " +
"to purge the DB records of the expunged resources. Currently only VirtualMachine is supported. " +
"An empty value will result in considering all resource types for purging",
false, ConfigKey.Scope.Global);
ConfigKey<Integer> ExpungedResourcesPurgeInterval = new ConfigKey<>("Advanced", Integer.class,
"expunged.resources.purge.interval", "86400",
"Interval (in seconds) for the background task to purge the DB records of the expunged resources",
false);
ConfigKey<Integer> ExpungedResourcesPurgeDelay = new ConfigKey<>("Advanced", Integer.class,
"expunged.resources.purge.delay", "300",
"Initial delay (in seconds) to start the background task to purge the DB records of the " +
"expunged resources task", false);
ConfigKey<Integer> ExpungedResourcesPurgeBatchSize = new ConfigKey<>("Advanced", Integer.class,
"expunged.resources.purge.batch.size", "50",
"Batch size to be used during purging of the DB records of the expunged resources",
true);
ConfigKey<String> ExpungedResourcesPurgeStartTime = new ConfigKey<>("Advanced", String.class,
"expunged.resources.purge.start.time", "",
"Start time to be used by the background task to purge the DB records of the expunged " +
"resources. Use format \"yyyy-MM-dd\" or \"yyyy-MM-dd HH:mm:ss\"", true);
ConfigKey<Integer> ExpungedResourcesPurgeKeepPastDays = new ConfigKey<>("Advanced", Integer.class,
"expunged.resources.purge.keep.past.days", "30",
"The number of days in the past from the execution time of the background task to purge " +
"the DB records of the expunged resources for which the expunged resources must not be purged. " +
"To enable purging DB records of the expunged resource till the execution of the background " +
"task, set the value to zero.", true);
ConfigKey<Integer> ExpungedResourcePurgeJobDelay = new ConfigKey<>("Advanced", Integer.class,
"expunged.resource.purge.job.delay",
String.valueOf(MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS),
String.format("Delay (in seconds) to execute the purging of the DB records of an expunged resource " +
"initiated by the configuration in the offering. Minimum value should be %d seconds " +
"and if a lower value is set then the minimum value will be used",
MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS),
true);
enum ResourceType {
VirtualMachine
}
long purgeExpungedResources(PurgeExpungedResourcesCmd cmd);
void purgeExpungedVmResourcesLaterIfNeeded(VirtualMachine vm);
}

View File

@ -37,4 +37,22 @@ public class CreateServiceOfferingCmdTest {
Assert.assertEquals(createServiceOfferingCmd.getDisplayText(), netName);
}
@Test
public void testIsPurgeResourcesNoOrNullValue() {
Assert.assertFalse(createServiceOfferingCmd.isPurgeResources());
ReflectionTestUtils.setField(createServiceOfferingCmd, "purgeResources", false);
Assert.assertFalse(createServiceOfferingCmd.isPurgeResources());
}
@Test
public void testIsPurgeResourcesFalse() {
ReflectionTestUtils.setField(createServiceOfferingCmd, "purgeResources", false);
Assert.assertFalse(createServiceOfferingCmd.isPurgeResources());
}
@Test
public void testIsPurgeResourcesTrue() {
ReflectionTestUtils.setField(createServiceOfferingCmd, "purgeResources", true);
Assert.assertTrue(createServiceOfferingCmd.isPurgeResources());
}
}

View File

@ -0,0 +1,51 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.admin.offering;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.junit.MockitoJUnitRunner;
import org.springframework.test.util.ReflectionTestUtils;
@RunWith(MockitoJUnitRunner.class)
public class UpdateServiceOfferingCmdTest {
@InjectMocks
private UpdateServiceOfferingCmd updateServiceOfferingCmd;
@Test
public void testIsPurgeResourcesNoOrNullValue() {
Assert.assertFalse(updateServiceOfferingCmd.isPurgeResources());
ReflectionTestUtils.setField(updateServiceOfferingCmd, "purgeResources", false);
Assert.assertFalse(updateServiceOfferingCmd.isPurgeResources());
}
@Test
public void testIsPurgeResourcesFalse() {
ReflectionTestUtils.setField(updateServiceOfferingCmd, "purgeResources", false);
Assert.assertFalse(updateServiceOfferingCmd.isPurgeResources());
}
@Test
public void testIsPurgeResourcesTrue() {
ReflectionTestUtils.setField(updateServiceOfferingCmd, "purgeResources", true);
Assert.assertTrue(updateServiceOfferingCmd.isPurgeResources());
}
}

View File

@ -0,0 +1,104 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.admin.resource;
import static org.junit.Assert.assertNull;
import java.util.Date;
import org.apache.cloudstack.api.ServerApiException;
import org.apache.cloudstack.api.response.PurgeExpungedResourcesResponse;
import org.apache.cloudstack.resource.ResourceCleanupService;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import org.springframework.test.util.ReflectionTestUtils;
import com.cloud.utils.exception.CloudRuntimeException;
@RunWith(MockitoJUnitRunner.class)
public class PurgeExpungedResourcesCmdTest {
@Mock
ResourceCleanupService resourceCleanupService;
@Spy
@InjectMocks
PurgeExpungedResourcesCmd spyCmd = Mockito.spy(new PurgeExpungedResourcesCmd());
@Test
public void testGetResourceType() {
PurgeExpungedResourcesCmd cmd = new PurgeExpungedResourcesCmd();
assertNull(cmd.getResourceType());
ReflectionTestUtils.setField(cmd, "resourceType", ResourceCleanupService.ResourceType.VirtualMachine.toString());
Assert.assertEquals(ResourceCleanupService.ResourceType.VirtualMachine.toString(), cmd.getResourceType());
}
@Test
public void testGetBatchSize() {
PurgeExpungedResourcesCmd cmd = new PurgeExpungedResourcesCmd();
assertNull(cmd.getBatchSize());
Long batchSize = 100L;
ReflectionTestUtils.setField(cmd, "batchSize", batchSize);
Assert.assertEquals(batchSize, cmd.getBatchSize());
}
@Test
public void testGetStartDate() {
PurgeExpungedResourcesCmd cmd = new PurgeExpungedResourcesCmd();
assertNull(cmd.getStartDate());
Date date = new Date();
ReflectionTestUtils.setField(cmd, "startDate", date);
Assert.assertEquals(date, cmd.getStartDate());
}
@Test
public void testGetEndDate() {
PurgeExpungedResourcesCmd cmd = new PurgeExpungedResourcesCmd();
assertNull(cmd.getEndDate());
Date date = new Date();
ReflectionTestUtils.setField(cmd, "endDate", date);
Assert.assertEquals(date, cmd.getEndDate());
}
@Test
public void testExecute() {
final PurgeExpungedResourcesResponse[] executeResponse = new PurgeExpungedResourcesResponse[1];
Long result = 100L;
Mockito.when(resourceCleanupService.purgeExpungedResources(Mockito.any())).thenReturn(result);
Mockito.doAnswer((Answer<Void>) invocation -> {
executeResponse[0] = (PurgeExpungedResourcesResponse)invocation.getArguments()[0];
return null;
}).when(spyCmd).setResponseObject(Mockito.any());
spyCmd.execute();
PurgeExpungedResourcesResponse response = executeResponse[0];
Assert.assertNotNull(response);
Assert.assertEquals(result, response.getResourceCount());
}
@Test(expected = ServerApiException.class)
public void testExecuteException() {
Mockito.doThrow(CloudRuntimeException.class).when(resourceCleanupService).purgeExpungedResources(Mockito.any());
spyCmd.execute();
}
}

View File

@ -348,4 +348,6 @@ public interface NetworkOrchestrationService {
Pair<NicProfile, Integer> importNic(final String macAddress, int deviceId, final Network network, final Boolean isDefaultNic, final VirtualMachine vm, final Network.IpAddresses ipAddresses, final DataCenter datacenter, boolean forced) throws InsufficientVirtualNetworkCapacityException, InsufficientAddressCapacityException;
void unmanageNics(VirtualMachineProfile vm);
void expungeLbVmRefs(List<Long> vmIds, Long batchSize);
}

View File

@ -156,4 +156,5 @@ public interface HighAvailabilityManager extends Manager {
String getHaTag();
DeploymentPlanner getHAPlanner();
int expungeWorkItemsByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -90,6 +90,7 @@ import org.apache.cloudstack.framework.messagebus.MessageHandler;
import org.apache.cloudstack.jobs.JobInfo;
import org.apache.cloudstack.managed.context.ManagedContextRunnable;
import org.apache.cloudstack.reservation.dao.ReservationDao;
import org.apache.cloudstack.resource.ResourceCleanupService;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
import org.apache.cloudstack.storage.to.VolumeObjectTO;
@ -402,6 +403,8 @@ public class VirtualMachineManagerImpl extends ManagerBase implements VirtualMac
private VpcDao vpcDao;
@Inject
private DomainDao domainDao;
@Inject
ResourceCleanupService resourceCleanupService;
VmWorkJobHandlerProxy _jobHandlerProxy = new VmWorkJobHandlerProxy(this);
@ -691,6 +694,7 @@ public class VirtualMachineManagerImpl extends ManagerBase implements VirtualMac
if (logger.isDebugEnabled()) {
logger.debug("Expunged " + vm);
}
resourceCleanupService.purgeExpungedVmResourcesLaterIfNeeded(vm);
}
private void handleUnsuccessfulExpungeOperation(List<Command> finalizeExpungeCommands, List<Command> nicExpungeCommands,

View File

@ -4738,6 +4738,19 @@ public class NetworkOrchestrator extends ManagerBase implements NetworkOrchestra
}
}
@Override
public void expungeLbVmRefs(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(networkElements) || CollectionUtils.isEmpty(vmIds)) {
return;
}
for (NetworkElement element : networkElements) {
if (element instanceof LoadBalancingServiceProvider) {
LoadBalancingServiceProvider lbProvider = (LoadBalancingServiceProvider)element;
lbProvider.expungeLbVmRefs(vmIds, batchSize);
}
}
}
@Override
public String getConfigComponentName() {
return NetworkOrchestrationService.class.getSimpleName();

View File

@ -35,4 +35,6 @@ public interface AutoScaleVmGroupVmMapDao extends GenericDao<AutoScaleVmGroupVmM
public boolean removeByVm(long vmId);
public boolean removeByGroup(long vmGroupId);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -18,7 +18,10 @@ package com.cloud.network.as.dao;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.network.as.AutoScaleVmGroupVmMapVO;
@ -31,9 +34,6 @@ import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine.State;
import com.cloud.vm.dao.VMInstanceDao;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
@Component
public class AutoScaleVmGroupVmMapDaoImpl extends GenericDaoBase<AutoScaleVmGroupVmMapVO, Long> implements AutoScaleVmGroupVmMapDao {
@ -115,4 +115,16 @@ public class AutoScaleVmGroupVmMapDaoImpl extends GenericDaoBase<AutoScaleVmGrou
sc.setParameters("vmGroupId", vmGroupId);
return remove(sc) >= 0;
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<AutoScaleVmGroupVmMapVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<AutoScaleVmGroupVmMapVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -105,4 +105,6 @@ public interface IPAddressDao extends GenericDao<IPAddressVO, Long> {
void buildQuarantineSearchCriteria(SearchCriteria<IPAddressVO> sc);
IPAddressVO findBySourceNetworkIdAndDatacenterIdAndState(long sourceNetworkId, long dataCenterId, State state);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -26,6 +26,7 @@ import javax.inject.Inject;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.resourcedetail.dao.UserIpAddressDetailsDao;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.dc.Vlan.VlanType;
@ -561,4 +562,16 @@ public class IPAddressDaoImpl extends GenericDaoBase<IPAddressVO, Long> implemen
sc.setParameters("state", State.Free);
return findOneBy(sc);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<IPAddressVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getAssociatedWithVmId(), SearchCriteria.Op.IN);
SearchCriteria<IPAddressVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -16,10 +16,14 @@
// under the License.
package com.cloud.network.dao;
import java.util.List;
import com.cloud.utils.db.GenericDao;
public interface InlineLoadBalancerNicMapDao extends GenericDao<InlineLoadBalancerNicMapVO, Long> {
InlineLoadBalancerNicMapVO findByPublicIpAddress(String publicIpAddress);
InlineLoadBalancerNicMapVO findByNicId(long nicId);
int expungeByNicList(List<Long> nicIds, Long batchSize);
}

View File

@ -17,9 +17,13 @@
package com.cloud.network.dao;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@Component
@ -41,4 +45,15 @@ public class InlineLoadBalancerNicMapDaoImpl extends GenericDaoBase<InlineLoadBa
return findOneBy(sc);
}
@Override
public int expungeByNicList(List<Long> nicIds, Long batchSize) {
if (CollectionUtils.isEmpty(nicIds)) {
return 0;
}
SearchBuilder<InlineLoadBalancerNicMapVO> sb = createSearchBuilder();
sb.and("nicIds", sb.entity().getNicId(), SearchCriteria.Op.IN);
SearchCriteria<InlineLoadBalancerNicMapVO> sc = sb.create();
sc.setParameters("nicIds", nicIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -42,4 +42,5 @@ public interface LoadBalancerVMMapDao extends GenericDao<LoadBalancerVMMapVO, Lo
LoadBalancerVMMapVO findByLoadBalancerIdAndVmIdVmIp(long loadBalancerId, long instanceId, String instanceIp);
void remove(long id, long instanceId, String instanceIp, Boolean revoke);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -18,11 +18,12 @@ package com.cloud.network.dao;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Func;
@ -135,4 +136,16 @@ public class LoadBalancerVMMapDaoImpl extends GenericDaoBase<LoadBalancerVMMapVO
sc.addAnd("instanceId", SearchCriteria.Op.EQ, instanceId);
return listBy(sc);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<LoadBalancerVMMapVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<LoadBalancerVMMapVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -18,8 +18,12 @@
package com.cloud.network.dao;
import java.util.List;
import com.cloud.utils.db.GenericDao;
public interface OpRouterMonitorServiceDao extends GenericDao<OpRouterMonitorServiceVO, Long> {
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -17,10 +17,27 @@
package com.cloud.network.dao;
import com.cloud.utils.db.GenericDaoBase;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@Component
public class OpRouterMonitorServiceDaoImpl extends GenericDaoBase<OpRouterMonitorServiceVO, Long> implements OpRouterMonitorServiceDao {
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<OpRouterMonitorServiceVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getId(), SearchCriteria.Op.IN);
SearchCriteria<OpRouterMonitorServiceVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -47,4 +47,5 @@ public interface PortForwardingRulesDao extends GenericDao<PortForwardingRuleVO,
PortForwardingRuleVO findByIdAndIp(long id, String secondaryIp);
List<PortForwardingRuleVO> listByNetworkAndDestIpAddr(String ip4Address, long networkId);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -20,6 +20,7 @@ import java.util.List;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.network.dao.FirewallRulesCidrsDao;
@ -170,4 +171,16 @@ public class PortForwardingRulesDaoImpl extends GenericDaoBase<PortForwardingRul
sc.setParameters("dstIp", secondaryIp);
return findOneBy(sc);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<PortForwardingRuleVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVirtualMachineId(), SearchCriteria.Op.IN);
SearchCriteria<PortForwardingRuleVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -17,10 +17,12 @@
package com.cloud.secstorage;
import java.util.Date;
import java.util.List;
import com.cloud.utils.db.GenericDao;
public interface CommandExecLogDao extends GenericDao<CommandExecLogVO, Long> {
public void expungeExpiredRecords(Date cutTime);
public Integer getCopyCmdCountForSSVM(Long id);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -19,6 +19,7 @@ package com.cloud.secstorage;
import java.util.Date;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.GenericDaoBase;
@ -57,4 +58,16 @@ public class CommandExecLogDaoImpl extends GenericDaoBase<CommandExecLogVO, Long
List<CommandExecLogVO> copyCmds = customSearch(sc, null);
return copyCmds.size();
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<CommandExecLogVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<CommandExecLogVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -57,4 +57,5 @@ public interface SnapshotDao extends GenericDao<SnapshotVO, Long>, StateDao<Snap
* @return A list of snapshots filtered by ids.
*/
List<SnapshotVO> listByIds(Object... ids);
List<SnapshotVO> searchByVolumes(List<Long> volumeIds);
}

View File

@ -18,11 +18,13 @@ package com.cloud.storage.dao;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.server.ResourceTag.ResourceObjectType;
@ -285,4 +287,16 @@ public class SnapshotDaoImpl extends GenericDaoBase<SnapshotVO, Long> implements
sc.setParameters("status", (Object[]) status);
return listBy(sc, null);
}
@Override
public List<SnapshotVO> searchByVolumes(List<Long> volumeIds) {
if (CollectionUtils.isEmpty(volumeIds)) {
return new ArrayList<>();
}
SearchBuilder<SnapshotVO> sb = createSearchBuilder();
sb.and("volumeIds", sb.entity().getVolumeId(), SearchCriteria.Op.IN);
SearchCriteria<SnapshotVO> sc = sb.create();
sc.setParameters("volumeIds", volumeIds.toArray());
return search(sc, null);
}
}

View File

@ -159,4 +159,5 @@ public interface VolumeDao extends GenericDao<VolumeVO, Long>, StateDao<Volume.S
List<VolumeVO> listAllocatedVolumesForAccountDiskOfferingIdsAndNotForVms(long accountId, List<Long> diskOfferingIds, List<Long> vmIds);
List<VolumeVO> searchRemovedByVms(List<Long> vmIds, Long batchSize);
}

View File

@ -27,14 +27,12 @@ import java.util.stream.Collectors;
import javax.inject.Inject;
import com.cloud.configuration.Resource;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.db.TransactionCallback;
import org.apache.cloudstack.reservation.ReservationVO;
import org.apache.cloudstack.reservation.dao.ReservationDao;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.configuration.Resource;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.server.ResourceTag.ResourceObjectType;
@ -48,12 +46,15 @@ import com.cloud.storage.VolumeVO;
import com.cloud.tags.dao.ResourceTagDao;
import com.cloud.utils.Pair;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.SearchCriteria.Func;
import com.cloud.utils.db.SearchCriteria.Op;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.db.TransactionCallback;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.db.UpdateBuilder;
import com.cloud.utils.exception.CloudRuntimeException;
@ -895,4 +896,18 @@ public class VolumeDaoImpl extends GenericDaoBase<VolumeVO, Long> implements Vol
return volume;
});
}
@Override
public List<VolumeVO> searchRemovedByVms(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return new ArrayList<>();
}
SearchBuilder<VolumeVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NNULL);
SearchCriteria<VolumeVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
Filter filter = new Filter(VolumeVO.class, "id", true, 0L, batchSize);
return searchIncludingRemoved(sc, filter, null, false);
}
}

View File

@ -41,5 +41,6 @@ public interface ItWorkDao extends GenericDao<ItWorkVO, String> {
boolean updateStep(ItWorkVO work, Step step);
List<ItWorkVO> listWorkInProgressFor(long nodeId);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -18,7 +18,7 @@ package com.cloud.vm;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.GenericDaoBase;
@ -103,4 +103,16 @@ public class ItWorkDaoImpl extends GenericDaoBase<ItWorkVO, String> implements I
return search(sc, null);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<ItWorkVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<ItWorkVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -23,6 +23,7 @@ import com.cloud.vm.ConsoleSessionVO;
import com.cloud.utils.db.GenericDao;
import java.util.Date;
import java.util.List;
public interface ConsoleSessionDao extends GenericDao<ConsoleSessionVO, Long> {
@ -33,4 +34,6 @@ public interface ConsoleSessionDao extends GenericDao<ConsoleSessionVO, Long> {
int expungeSessionsOlderThanDate(Date date);
void acquireSession(String sessionUuid);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -20,6 +20,9 @@
package com.cloud.vm.dao;
import java.util.Date;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
@ -65,5 +68,15 @@ public class ConsoleSessionDaoImpl extends GenericDaoBase<ConsoleSessionVO, Long
update(consoleSessionVO.getId(), consoleSessionVO);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<ConsoleSessionVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<ConsoleSessionVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -100,4 +100,5 @@ public interface NicDao extends GenericDao<NicVO, Long> {
NicVO findByIpAddressAndVmType(String ip, VirtualMachine.Type vmType);
List<NicVO> listByNetworkIdAndType(long networkId, VirtualMachine.Type vmType);
List<NicVO> searchRemovedByVms(List<Long> vmIds, Long batchSize);
}

View File

@ -17,11 +17,13 @@
package com.cloud.vm.dao;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.Filter;
@ -428,4 +430,18 @@ public class NicDaoImpl extends GenericDaoBase<NicVO, Long> implements NicDao {
sc.setParameters("vmType", vmType);
return listBy(sc);
}
@Override
public List<NicVO> searchRemovedByVms(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return new ArrayList<>();
}
SearchBuilder<NicVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NNULL);
SearchCriteria<NicVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
Filter filter = new Filter(NicVO.class, "id", true, 0L, batchSize);
return searchIncludingRemoved(sc, filter, null, false);
}
}

View File

@ -29,4 +29,5 @@ public interface NicExtraDhcpOptionDao extends GenericDao<NicExtraDhcpOptionVO,
* @param extraDhcpOptions
*/
void saveExtraDhcpOptions(List<NicExtraDhcpOptionVO> extraDhcpOptions);
int expungeByNicList(List<Long> nicIds, Long batchSize);
}

View File

@ -16,13 +16,13 @@
// under the License.
package com.cloud.vm.dao;
import org.springframework.stereotype.Component;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.NicExtraDhcpOption;
@ -74,4 +74,15 @@ public class NicExtraDhcpOptionDaoImpl extends GenericDaoBase<NicExtraDhcpOptio
expunge(sc);
}
@Override
public int expungeByNicList(List<Long> nicIds, Long batchSize) {
if (CollectionUtils.isEmpty(nicIds)) {
return 0;
}
SearchBuilder<NicExtraDhcpOptionVO> sb = createSearchBuilder();
sb.and("nicIds", sb.entity().getNicId(), SearchCriteria.Op.IN);
SearchCriteria<NicExtraDhcpOptionVO> sc = sb.create();
sc.setParameters("nicIds", nicIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -55,4 +55,5 @@ public interface NicSecondaryIpDao extends GenericDao<NicSecondaryIpVO, Long> {
List<NicSecondaryIpVO> listSecondaryIpUsingKeyword(long nicId, String keyword);
int moveSecondaryIps(long fromNicId, long toNicId);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -19,6 +19,7 @@ package com.cloud.vm.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
@ -192,4 +193,16 @@ public class NicSecondaryIpDaoImpl extends GenericDaoBase<NicSecondaryIpVO, Long
return update(update, sc);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<NicSecondaryIpVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVmId(), SearchCriteria.Op.IN);
SearchCriteria<NicSecondaryIpVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -165,4 +165,7 @@ public interface VMInstanceDao extends GenericDao<VMInstanceVO, Long>, StateDao<
void updateSystemVmTemplateId(long templateId, Hypervisor.HypervisorType hypervisorType);
List<VMInstanceVO> listByHostOrLastHostOrHostPod(List<Long> hostIds, long podId);
List<VMInstanceVO> searchRemovedByRemoveDate(final Date startDate, final Date endDate, final Long batchSize,
List<Long> skippedVmIds);
}

View File

@ -28,6 +28,7 @@ import java.util.Map;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.host.HostVO;
@ -39,6 +40,7 @@ import com.cloud.utils.DateUtil;
import com.cloud.utils.Pair;
import com.cloud.utils.db.Attribute;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.JoinBuilder;
@ -1016,4 +1018,26 @@ public class VMInstanceDaoImpl extends GenericDaoBase<VMInstanceVO, Long> implem
sc.setParameters("podId", String.valueOf(podId));
return listBy(sc);
}
@Override
public List<VMInstanceVO> searchRemovedByRemoveDate(Date startDate, Date endDate, Long batchSize,
List<Long> skippedVmIds) {
SearchBuilder<VMInstanceVO> sb = createSearchBuilder();
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NNULL);
sb.and("startDate", sb.entity().getRemoved(), SearchCriteria.Op.GTEQ);
sb.and("endDate", sb.entity().getRemoved(), SearchCriteria.Op.LTEQ);
sb.and("skippedVmIds", sb.entity().getId(), Op.NOTIN);
SearchCriteria<VMInstanceVO> sc = sb.create();
if (startDate != null) {
sc.setParameters("startDate", startDate);
}
if (endDate != null) {
sc.setParameters("endDate", endDate);
}
if (CollectionUtils.isNotEmpty(skippedVmIds)) {
sc.setParameters("skippedVmIds", skippedVmIds.toArray());
}
Filter filter = new Filter(VMInstanceVO.class, "id", true, 0L, batchSize);
return searchIncludingRemoved(sc, filter, null, false);
}
}

View File

@ -38,4 +38,6 @@ public interface VMSnapshotDao extends GenericDao<VMSnapshotVO, Long>, StateDao<
VMSnapshotVO findByName(Long vmId, String name);
List<VMSnapshotVO> listByAccountId(Long accountId);
List<VMSnapshotVO> searchByVms(List<Long> vmIds);
List<VMSnapshotVO> searchRemovedByVms(List<Long> vmIds, Long batchSize);
}

View File

@ -17,12 +17,14 @@
package com.cloud.vm.snapshot.dao;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@ -180,4 +182,29 @@ public class VMSnapshotDaoImpl extends GenericDaoBase<VMSnapshotVO, Long> implem
return rows > 0;
}
@Override
public List<VMSnapshotVO> searchByVms(List<Long> vmIds) {
if (CollectionUtils.isEmpty(vmIds)) {
return new ArrayList<>();
}
SearchBuilder<VMSnapshotVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVmId(), SearchCriteria.Op.IN);
SearchCriteria<VMSnapshotVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return search(sc, null);
}
@Override
public List<VMSnapshotVO> searchRemovedByVms(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return new ArrayList<>();
}
SearchBuilder<VMSnapshotVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVmId(), SearchCriteria.Op.IN);
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NNULL);
SearchCriteria<VMSnapshotVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
Filter filter = new Filter(VMSnapshotVO.class, "id", true, 0L, batchSize);
return searchIncludingRemoved(sc, filter, null, false);
}
}

View File

@ -97,4 +97,6 @@ public interface ResourceDetailsDao<R extends ResourceDetail> extends GenericDao
public void addDetail(long resourceId, String key, String value, boolean display);
public List<Long> findResourceIdsByNameAndValueIn(String name, Object[] values);
public long batchExpungeForResources(List<Long> ids, Long batchSize);
}

View File

@ -21,13 +21,14 @@ import java.util.List;
import java.util.Map;
import org.apache.cloudstack.api.ResourceDetail;
import org.apache.commons.collections.CollectionUtils;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.db.SearchCriteria.Op;
import com.cloud.utils.db.TransactionLegacy;
public abstract class ResourceDetailsDaoBase<R extends ResourceDetail> extends GenericDaoBase<R, Long> implements ResourceDetailsDao<R> {
private SearchBuilder<R> AllFieldsSearch;
@ -201,4 +202,17 @@ public abstract class ResourceDetailsDaoBase<R extends ResourceDetail> extends G
return customSearch(sc, null);
}
@Override
public long batchExpungeForResources(final List<Long> ids, final Long batchSize) {
if (CollectionUtils.isEmpty(ids)) {
return 0;
}
SearchBuilder<R> sb = createSearchBuilder();
sb.and("ids", sb.entity().getResourceId(), Op.IN);
sb.done();
SearchCriteria<R> sc = sb.create();
sc.setParameters("ids", ids.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -106,4 +106,6 @@ StateDao<ObjectInDataStoreStateMachine.State, ObjectInDataStoreStateMachine.Even
SnapshotDataStoreVO findOneBySnapshotAndDatastoreRole(long snapshotId, DataStoreRole role);
void updateDisplayForSnapshotStoreRole(long snapshotId, long storeId, DataStoreRole role, boolean display);
int expungeBySnapshotList(List<Long> snapshotIds, Long batchSize);
}

View File

@ -559,4 +559,16 @@ public class SnapshotDataStoreDaoImpl extends GenericDaoBase<SnapshotDataStoreVO
ref.setDisplay(display);
update(ref.getId(), ref);
}
@Override
public int expungeBySnapshotList(final List<Long> snapshotIds, final Long batchSize) {
if (CollectionUtils.isEmpty(snapshotIds)) {
return 0;
}
SearchBuilder<SnapshotDataStoreVO> sb = createSearchBuilder();
sb.and("snapshotIds", sb.entity().getSnapshotId(), SearchCriteria.Op.IN);
SearchCriteria<SnapshotDataStoreVO> sc = sb.create();
sc.setParameters("snapshotIds", snapshotIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -59,4 +59,6 @@ public interface VolumeDataStoreDao extends GenericDao<VolumeDataStoreVO, Long>,
List<VolumeDataStoreVO> listByVolume(long volumeId, long storeId);
List<VolumeDataStoreVO> listByStoreIdAndInstallPaths(Long storeId, List<String> paths);
int expungeByVolumeList(List<Long> volumeIds, Long batchSize);
}

View File

@ -19,11 +19,9 @@
package com.cloud.network.as.dao;
import com.cloud.network.as.AutoScaleVmGroupVmMapVO;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.VirtualMachine;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
@ -33,9 +31,13 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import java.util.Arrays;
import java.util.List;
import com.cloud.network.as.AutoScaleVmGroupVmMapVO;
import com.cloud.utils.db.GenericSearchBuilder;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.VirtualMachine;
@RunWith(MockitoJUnitRunner.class)
public class AutoScaleVmGroupVmMapDaoImplTest {
@ -198,4 +200,33 @@ public class AutoScaleVmGroupVmMapDaoImplTest {
Mockito.verify(searchCriteriaAutoScaleVmGroupVmMapVOMock).setParameters("vmGroupId", groupId);
Mockito.verify(AutoScaleVmGroupVmMapDaoImplSpy).remove(searchCriteriaAutoScaleVmGroupVmMapVOMock);
}
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, AutoScaleVmGroupVmMapDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, AutoScaleVmGroupVmMapDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<AutoScaleVmGroupVmMapVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<AutoScaleVmGroupVmMapVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(AutoScaleVmGroupVmMapDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(AutoScaleVmGroupVmMapDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final AutoScaleVmGroupVmMapVO mockedVO = Mockito.mock(AutoScaleVmGroupVmMapVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), AutoScaleVmGroupVmMapDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(AutoScaleVmGroupVmMapDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class IPAddressDaoImplTest {
@Spy
IPAddressDaoImpl ipAddressDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, ipAddressDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, ipAddressDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<IPAddressVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<IPAddressVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(ipAddressDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(ipAddressDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final IPAddressVO mockedVO = Mockito.mock(IPAddressVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), ipAddressDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(ipAddressDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class InlineLoadBalancerNicMapDaoImplTest {
@Spy
InlineLoadBalancerNicMapDaoImpl inlineLoadBalancerNicMapDaoImplSpy;
@Test
public void testExpungeByNicListNoNics() {
Assert.assertEquals(0, inlineLoadBalancerNicMapDaoImplSpy.expungeByNicList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, inlineLoadBalancerNicMapDaoImplSpy.expungeByNicList(
null, 100L));
}
@Test
public void testExpungeByNicList() {
SearchBuilder<InlineLoadBalancerNicMapVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<InlineLoadBalancerNicMapVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(inlineLoadBalancerNicMapDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(inlineLoadBalancerNicMapDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final InlineLoadBalancerNicMapVO mockedVO = Mockito.mock(InlineLoadBalancerNicMapVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> nicIds = List.of(1L, 2L);
Object[] array = nicIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), inlineLoadBalancerNicMapDaoImplSpy.expungeByNicList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("nicIds", array);
Mockito.verify(inlineLoadBalancerNicMapDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class LoadBalancerVMMapDaoImplTest {
@Spy
LoadBalancerVMMapDaoImpl loadBalancerVMMapDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, loadBalancerVMMapDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, loadBalancerVMMapDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<LoadBalancerVMMapVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<LoadBalancerVMMapVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(loadBalancerVMMapDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(loadBalancerVMMapDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final LoadBalancerVMMapVO mockedVO = Mockito.mock(LoadBalancerVMMapVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), loadBalancerVMMapDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(loadBalancerVMMapDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class OpRouterMonitorServiceDaoImplTest {
@Spy
OpRouterMonitorServiceDaoImpl opRouterMonitorServiceDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, opRouterMonitorServiceDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, opRouterMonitorServiceDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<OpRouterMonitorServiceVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<OpRouterMonitorServiceVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(opRouterMonitorServiceDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(opRouterMonitorServiceDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final OpRouterMonitorServiceVO mockedVO = Mockito.mock(OpRouterMonitorServiceVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), opRouterMonitorServiceDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(opRouterMonitorServiceDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.network.rules.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.network.rules.PortForwardingRuleVO;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class PortForwardingRulesDaoImplTest {
@Spy
PortForwardingRulesDaoImpl portForwardingRulesDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, portForwardingRulesDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, portForwardingRulesDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<PortForwardingRuleVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<PortForwardingRuleVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(portForwardingRulesDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(portForwardingRulesDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final PortForwardingRuleVO mockedVO = Mockito.mock(PortForwardingRuleVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), portForwardingRulesDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(portForwardingRulesDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.secstorage;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class CommandExecLogDaoImplTest {
@Spy
CommandExecLogDaoImpl commandExecLogDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, commandExecLogDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, commandExecLogDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<CommandExecLogVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<CommandExecLogVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(commandExecLogDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(commandExecLogDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final CommandExecLogVO mockedVO = Mockito.mock(CommandExecLogVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), commandExecLogDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(commandExecLogDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -26,16 +26,25 @@ import static org.mockito.Mockito.when;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import com.cloud.storage.VolumeVO;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy;
@RunWith(MockitoJUnitRunner.class)
@ -48,6 +57,7 @@ public class VolumeDaoImplTest {
private static MockedStatic<TransactionLegacy> mockedTransactionLegacy;
@Spy
private final VolumeDaoImpl volumeDao = new VolumeDaoImpl();
@BeforeClass
@ -102,4 +112,34 @@ public class VolumeDaoImplTest {
verify(preparedStatementMock, times(2)).setLong(anyInt(), anyLong());
verify(preparedStatementMock, times(1)).executeQuery();
}
@Test
public void testSearchRemovedByVmsNoVms() {
Assert.assertTrue(CollectionUtils.isEmpty(volumeDao.searchRemovedByVms(
new ArrayList<>(), 100L)));
Assert.assertTrue(CollectionUtils.isEmpty(volumeDao.searchRemovedByVms(
null, 100L)));
}
@Test
public void testSearchRemovedByVms() {
SearchBuilder<VolumeVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<VolumeVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doReturn(new ArrayList<>()).when(volumeDao).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
Mockito.when(volumeDao.createSearchBuilder()).thenReturn(sb);
final VolumeVO mockedVO = Mockito.mock(VolumeVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
volumeDao.searchRemovedByVms(List.of(1L, 2L), batchSize);
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(volumeDao, Mockito.times(1)).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class ItWorkDaoImplTest {
@Spy
ItWorkDaoImpl itWorkDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, itWorkDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, itWorkDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<ItWorkVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<ItWorkVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(itWorkDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(itWorkDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final ItWorkVO mockedVO = Mockito.mock(ItWorkVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), itWorkDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(itWorkDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.ConsoleSessionVO;
@RunWith(MockitoJUnitRunner.class)
public class ConsoleSessionDaoImplTest {
@Spy
ConsoleSessionDaoImpl consoleSessionDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, consoleSessionDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, consoleSessionDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<ConsoleSessionVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<ConsoleSessionVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(consoleSessionDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(consoleSessionDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final ConsoleSessionVO mockedVO = Mockito.mock(ConsoleSessionVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), consoleSessionDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(consoleSessionDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,69 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.NicVO;
@RunWith(MockitoJUnitRunner.class)
public class NicDaoImplTest {
@Spy
NicDaoImpl nicDaoImplSpy;
@Test
public void testSearchRemovedByVmsNoVms() {
Assert.assertTrue(CollectionUtils.isEmpty(nicDaoImplSpy.searchRemovedByVms(
new ArrayList<>(), 100L)));
Assert.assertTrue(CollectionUtils.isEmpty(nicDaoImplSpy.searchRemovedByVms(
null, 100L)));
}
@Test
public void testSearchRemovedByVms() {
SearchBuilder<NicVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<NicVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doReturn(new ArrayList<>()).when(nicDaoImplSpy).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
Mockito.when(nicDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final NicVO mockedVO = Mockito.mock(NicVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
nicDaoImplSpy.searchRemovedByVms(List.of(1L, 2L), batchSize);
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(nicDaoImplSpy, Mockito.times(1)).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.NicExtraDhcpOptionVO;
@RunWith(MockitoJUnitRunner.class)
public class NicExtraDhcpOptionDaoImplTest {
@Spy
NicExtraDhcpOptionDaoImpl nicExtraDhcpOptionDaoImplSpy;
@Test
public void testExpungeByNicListNoNics() {
Assert.assertEquals(0, nicExtraDhcpOptionDaoImplSpy.expungeByNicList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, nicExtraDhcpOptionDaoImplSpy.expungeByNicList(
null, 100L));
}
@Test
public void testExpungeByNicList() {
SearchBuilder<NicExtraDhcpOptionVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<NicExtraDhcpOptionVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(nicExtraDhcpOptionDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(nicExtraDhcpOptionDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final NicExtraDhcpOptionVO mockedVO = Mockito.mock(NicExtraDhcpOptionVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> nicIds = List.of(1L, 2L);
Object[] array = nicIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), nicExtraDhcpOptionDaoImplSpy.expungeByNicList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("nicIds", array);
Mockito.verify(nicExtraDhcpOptionDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class NicSecondaryIpDaoImplTest {
@Spy
NicSecondaryIpDaoImpl nicSecondaryIpDaoImplSpy;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, nicSecondaryIpDaoImplSpy.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, nicSecondaryIpDaoImplSpy.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<NicSecondaryIpVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<NicSecondaryIpVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(nicSecondaryIpDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(nicSecondaryIpDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final NicSecondaryIpVO mockedVO = Mockito.mock(NicSecondaryIpVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), nicSecondaryIpDaoImplSpy.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(nicSecondaryIpDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -30,6 +30,8 @@ import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import org.joda.time.DateTime;
@ -37,10 +39,14 @@ import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.MockitoAnnotations;
import org.mockito.Spy;
import com.cloud.utils.Pair;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
@ -199,4 +205,29 @@ public class VMInstanceDaoImplTest {
assertTrue(result);
}
@Test
public void testSearchRemovedByRemoveDate() {
SearchBuilder<VMInstanceVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<VMInstanceVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.when(vmInstanceDao.createSearchBuilder()).thenReturn(sb);
final VMInstanceVO mockedVO = Mockito.mock(VMInstanceVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
Mockito.doReturn(new ArrayList<>()).when(vmInstanceDao).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
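// search window: the last 10 days, ending at the current time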
Calendar cal = Calendar.getInstance();
Date endDate = new Date();
cal.setTime(endDate);
cal.add(Calendar.DATE, -1 * 10);
Date startDate = cal.getTime();
vmInstanceDao.searchRemovedByRemoveDate(startDate, endDate, 50L, new ArrayList<>());
Mockito.verify(sc).setParameters("startDate", startDate);
Mockito.verify(sc).setParameters("endDate", endDate);
Mockito.verify(sc, Mockito.never()).setParameters(Mockito.eq("skippedVmIds"), Mockito.any());
Mockito.verify(vmInstanceDao, Mockito.times(1)).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
}
}

View File

@ -0,0 +1,69 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.vm.snapshot.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.vm.snapshot.VMSnapshotVO;
@RunWith(MockitoJUnitRunner.class)
public class VMSnapshotDaoImplTest {
@Spy
VMSnapshotDaoImpl vmSnapshotDaoImplSpy;
@Test
public void testSearchRemovedByVmsNoVms() {
Assert.assertTrue(CollectionUtils.isEmpty(vmSnapshotDaoImplSpy.searchRemovedByVms(
new ArrayList<>(), 100L)));
Assert.assertTrue(CollectionUtils.isEmpty(vmSnapshotDaoImplSpy.searchRemovedByVms(
null, 100L)));
}
@Test
public void testSearchRemovedByVms() {
SearchBuilder<VMSnapshotVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<VMSnapshotVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doReturn(new ArrayList<>()).when(vmSnapshotDaoImplSpy).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
Mockito.when(vmSnapshotDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final VMSnapshotVO mockedVO = Mockito.mock(VMSnapshotVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
vmSnapshotDaoImplSpy.searchRemovedByVms(List.of(1L, 2L), batchSize);
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(vmSnapshotDaoImplSpy, Mockito.times(1)).searchIncludingRemoved(
Mockito.any(SearchCriteria.class), Mockito.any(Filter.class), Mockito.eq(null),
Mockito.eq(false));
}
}

View File

@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.storage.datastore.db;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class SnapshotDataStoreDaoImplTest {
@Spy
SnapshotDataStoreDaoImpl snapshotDataStoreDaoImplSpy;
@Test
public void testExpungeBySnapshotListNoSnapshots() {
Assert.assertEquals(0, snapshotDataStoreDaoImplSpy.expungeBySnapshotList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, snapshotDataStoreDaoImplSpy.expungeBySnapshotList(
null, 100L));
}
@Test
public void testExpungeBySnapshotList() {
SearchBuilder<SnapshotDataStoreVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<SnapshotDataStoreVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(snapshotDataStoreDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(snapshotDataStoreDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final SnapshotDataStoreVO mockedVO = Mockito.mock(SnapshotDataStoreVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> snapshotIds = List.of(1L, 2L);
Object[] array = snapshotIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), snapshotDataStoreDaoImplSpy.expungeBySnapshotList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("snapshotIds", array);
Mockito.verify(snapshotDataStoreDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -407,4 +407,16 @@ public class VolumeDataStoreDaoImpl extends GenericDaoBase<VolumeDataStoreVO, Lo
}
return true;
}
@Override
public int expungeByVolumeList(List<Long> volumeIds, Long batchSize) {
if (CollectionUtils.isEmpty(volumeIds)) {
return 0;
}
SearchBuilder<VolumeDataStoreVO> sb = createSearchBuilder();
sb.and("volumeIds", sb.entity().getVolumeId(), SearchCriteria.Op.IN);
SearchCriteria<VolumeDataStoreVO> sc = sb.create();
sc.setParameters("volumeIds", volumeIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.storage.image.db;
import java.util.ArrayList;
import java.util.List;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreVO;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class VolumeDataStoreDaoImplTest {
@Spy
VolumeDataStoreDaoImpl volumeDataStoreDaoImplSpy;
@Test
public void testExpungeByVolumeListNoVolumes() {
Assert.assertEquals(0, volumeDataStoreDaoImplSpy.expungeByVolumeList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, volumeDataStoreDaoImplSpy.expungeByVolumeList(
null, 100L));
}
@Test
public void testExpungeByVolumeList() {
SearchBuilder<VolumeDataStoreVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<VolumeDataStoreVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(volumeDataStoreDaoImplSpy).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(volumeDataStoreDaoImplSpy.createSearchBuilder()).thenReturn(sb);
final VolumeDataStoreVO mockedVO = Mockito.mock(VolumeDataStoreVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> volumeIds = List.of(1L, 2L);
Object[] array = volumeIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), volumeDataStoreDaoImplSpy.expungeByVolumeList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("volumeIds", array);
Mockito.verify(volumeDataStoreDaoImplSpy, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -229,6 +229,24 @@ public interface GenericDao<T, ID extends Serializable> {
*/
int expunge(final SearchCriteria<T> sc);
/**
* remove the entity bean specified by the search criteria and filter
* @param sc
* @param filter
* @return number of rows deleted
*/
int expunge(final SearchCriteria<T> sc, final Filter filter);
/**
* remove the entity bean specified by the search criteria and batchSize
* @param sc
* @param batchSize
* @return number of rows deleted
*/
int batchExpunge(final SearchCriteria<T> sc, final Long batchSize);
int expungeList(List<ID> ids);
/**
* expunge the removed rows.
*/
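For reference, a hedged sketch of how a DAO implementation is expected to layer a bulk-delete helper on the batchExpunge contract declared above; the VO type, accessor and method name are illustrative placeholders, but the concrete DAOs touched by this change (for example VmWorkJobDaoImpl and HighAvailabilityDaoImpl) follow the same shape:
// Illustrative sketch only: "ExampleVO" and getVmInstanceId() are placeholders, not part of this change.
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0; // nothing to expunge
}
SearchBuilder<ExampleVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVmInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<ExampleVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize); // deletes in chunks of batchSize until done
}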

View File

@ -20,6 +20,8 @@ import java.io.Serializable;
import java.io.UnsupportedEncodingException;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.math.BigInteger;
@ -59,9 +61,12 @@ import javax.persistence.Enumerated;
import javax.persistence.Table;
import javax.persistence.TableGenerator;
import com.amazonaws.util.CollectionUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import com.amazonaws.util.CollectionUtils;
import com.cloud.utils.DateUtil;
import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair;
@ -74,8 +79,6 @@ import com.cloud.utils.db.SearchCriteria.SelectType;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.Ip;
import com.cloud.utils.net.NetUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.exception.ExceptionUtils;
import net.sf.cglib.proxy.Callback;
import net.sf.cglib.proxy.CallbackFilter;
@ -1234,7 +1237,7 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
// FIXME: Does not work for joins.
@Override
public int expunge(final SearchCriteria<T> sc) {
public int expunge(final SearchCriteria<T> sc, final Filter filter) {
if (sc == null) {
throw new CloudRuntimeException("Call to expunge with null search criteria");
}
@ -1246,6 +1249,7 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
if (sc != null && sc.getWhereClause().length() > 0) {
str.append(sc.getWhereClause());
}
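// append the optional filter's ordering/limit so callers can cap how many rows a single expunge pass deletes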
addFilter(str, filter);
final String sql = str.toString();
@ -1264,6 +1268,47 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
throw new CloudRuntimeException("Caught: " + pstmt, e);
}
}
@Override
public int expunge(final SearchCriteria<T> sc) {
return expunge(sc, null);
}
@Override
public int batchExpunge(final SearchCriteria<T> sc, final Long batchSize) {
Filter filter = null;
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
if (batchSizeFinal > 0) {
filter = new Filter(batchSizeFinal);
}
int expunged = 0;
int currentExpunged = 0;
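// expunge one batch per pass; keep going until a pass deletes fewer rows than the batch size (no batch size means a single unbounded pass)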
do {
currentExpunged = expunge(sc, filter);
expunged += currentExpunged;
} while (batchSizeFinal > 0 && currentExpunged >= batchSizeFinal);
return expunged;
}
@Override
public int expungeList(final List<ID> ids) {
if (org.apache.commons.collections.CollectionUtils.isEmpty(ids)) {
return 0;
}
SearchBuilder<T> sb = createSearchBuilder();
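// resolve the entity's getId() accessor reflectively so a generic "id IN (...)" criterion can be built for any VO type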
Object obj = null;
try {
Method m = sb.entity().getClass().getMethod("getId");
obj = m.invoke(sb.entity());
} catch (NoSuchMethodException | InvocationTargetException | IllegalAccessException ignored) {}
if (obj == null) {
logger.warn(String.format("Unable to get ID object for entity: %s", _entityBeanType.getSimpleName()));
return 0;
}
sb.and("id", obj, SearchCriteria.Op.IN);
SearchCriteria<T> sc = sb.create();
sc.setParameters("id", ids.toArray());
return expunge(sc);
}
@DB()
protected StringBuilder createPartialSelectSql(SearchCriteria<?> sc, final boolean whereClause, final boolean enableQueryCache) {

View File

@ -39,4 +39,5 @@ public interface VmWorkJobDao extends GenericDao<VmWorkJobVO, Long> {
void expungeCompletedWorkJobs(Date cutDate);
void expungeLeftoverWorkJobs(long msid);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -24,10 +24,10 @@ import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.cloudstack.framework.jobs.impl.VmWorkJobVO;
import org.apache.cloudstack.framework.jobs.impl.VmWorkJobVO.Step;
import org.apache.cloudstack.jobs.JobInfo;
import org.apache.commons.collections.CollectionUtils;
import com.cloud.utils.DateUtil;
import com.cloud.utils.db.Filter;
@ -212,4 +212,16 @@ public class VmWorkJobDaoImpl extends GenericDaoBase<VmWorkJobVO, Long> implemen
}
});
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<VmWorkJobVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getVmInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<VmWorkJobVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.framework.jobs.dao;
import java.util.ArrayList;
import java.util.List;
import org.apache.cloudstack.framework.jobs.impl.VmWorkJobVO;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class VmWorkJobDaoImplTest {
@Spy
VmWorkJobDaoImpl vmWorkJobDaoImpl;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, vmWorkJobDaoImpl.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, vmWorkJobDaoImpl.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<VmWorkJobVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<VmWorkJobVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(vmWorkJobDaoImpl).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(vmWorkJobDaoImpl.createSearchBuilder()).thenReturn(sb);
final VmWorkJobVO mockedVO = Mockito.mock(VmWorkJobVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), vmWorkJobDaoImpl.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(vmWorkJobDaoImpl, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -224,4 +224,8 @@ public class ElasticLoadBalancerElement extends AdapterBase implements LoadBalan
return true;
}
@Override
public void expungeLbVmRefs(List<Long> vmIds, Long batchSize) {
_lbMgr.expungeLbVmRefs(vmIds, batchSize);
}
}

View File

@ -37,4 +37,6 @@ public interface ElasticLoadBalancerManager {
NetworkRuleConflictException;
public void handleDeleteLoadBalancerRule(LoadBalancer lb, long callerUserId, Account caller);
void expungeLbVmRefs(List<Long> vmIds, Long batchSize);
}

View File

@ -599,4 +599,8 @@ public class ElasticLoadBalancerManagerImpl extends ManagerBase implements Elast
public void finalizeUnmanage(VirtualMachine vm) {
}
@Override
public void expungeLbVmRefs(List<Long> vmIds, Long batchSize) {
_elbVmMapDao.expungeByLbVmList(vmIds, batchSize);
}
}

View File

@ -40,4 +40,6 @@ public interface ElasticLbVmMapDao extends GenericDao<ElasticLbVmMapVO, Long> {
List<LoadBalancerVO> listLbsForElbVm(long elbVmId);
int expungeByLbVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -21,6 +21,7 @@ import java.util.List;
import javax.annotation.PostConstruct;
import javax.inject.Inject;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.network.ElasticLbVmMapVO;
@ -136,4 +137,16 @@ public class ElasticLbVmMapDaoImpl extends GenericDaoBase<ElasticLbVmMapVO, Long
return _loadbalancerDao.search(sc, null);
}
@Override
public int expungeByLbVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<ElasticLbVmMapVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getElbVmId(), SearchCriteria.Op.IN);
SearchCriteria<ElasticLbVmMapVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}

View File

@ -19,35 +19,35 @@ package com.cloud.api.query.dao;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import com.cloud.dc.VsphereStoragePolicyVO;
import com.cloud.dc.dao.VsphereStoragePolicyDao;
import com.cloud.user.AccountManager;
import com.cloud.utils.db.TransactionLegacy;
import javax.inject.Inject;
import org.apache.cloudstack.annotation.AnnotationService;
import org.apache.cloudstack.annotation.dao.AnnotationDao;
import com.cloud.storage.DiskOfferingVO;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.response.ServiceOfferingResponse;
import org.apache.cloudstack.context.CallContext;
import org.apache.cloudstack.framework.config.dao.ConfigurationDao;
import org.apache.commons.lang3.StringUtils;
import org.springframework.stereotype.Component;
import com.cloud.api.ApiDBUtils;
import com.cloud.api.query.vo.ServiceOfferingJoinVO;
import com.cloud.dc.VsphereStoragePolicyVO;
import com.cloud.dc.dao.VsphereStoragePolicyDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.server.ResourceTag.ResourceObjectType;
import com.cloud.storage.DiskOfferingVO;
import com.cloud.user.AccountManager;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import javax.inject.Inject;
import com.cloud.utils.db.TransactionLegacy;
@Component
public class ServiceOfferingJoinDaoImpl extends GenericDaoBase<ServiceOfferingJoinVO, Long> implements ServiceOfferingJoinDao {
@ -167,6 +167,10 @@ public class ServiceOfferingJoinDaoImpl extends GenericDaoBase<ServiceOfferingJo
if (vsphereStoragePolicyVO != null)
offeringResponse.setVsphereStoragePolicy(vsphereStoragePolicyVO.getName());
}
String purgeResource = offeringDetails.get(ServiceOffering.PURGE_DB_ENTITIES_KEY);
if (StringUtils.isNotBlank(purgeResource)) {
offeringResponse.setPurgeResources(Boolean.parseBoolean(purgeResource));
}
}
long rootDiskSizeInGb = (long) offering.getRootDiskSize() / GB_TO_BYTES;

View File

@ -3187,7 +3187,7 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
cmd.getIopsReadRate(), cmd.getIopsReadRateMax(), cmd.getIopsReadRateMaxLength(),
cmd.getIopsWriteRate(), cmd.getIopsWriteRateMax(), cmd.getIopsWriteRateMaxLength(),
cmd.getHypervisorSnapshotReserve(), cmd.getCacheMode(), storagePolicyId, cmd.getDynamicScalingEnabled(), diskOfferingId,
cmd.getDiskOfferingStrictness(), cmd.isCustomized(), cmd.getEncryptRoot());
cmd.getDiskOfferingStrictness(), cmd.isCustomized(), cmd.getEncryptRoot(), cmd.isPurgeResources());
}
protected ServiceOfferingVO createServiceOffering(final long userId, final boolean isSystem, final VirtualMachine.Type vmType,
@ -3198,8 +3198,9 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
Long bytesWriteRate, Long bytesWriteRateMax, Long bytesWriteRateMaxLength,
Long iopsReadRate, Long iopsReadRateMax, Long iopsReadRateMaxLength,
Long iopsWriteRate, Long iopsWriteRateMax, Long iopsWriteRateMaxLength,
final Integer hypervisorSnapshotReserve, String cacheMode, final Long storagePolicyID, final boolean dynamicScalingEnabled, final Long diskOfferingId,
final boolean diskOfferingStrictness, final boolean isCustomized, final boolean encryptRoot) {
final Integer hypervisorSnapshotReserve, String cacheMode, final Long storagePolicyID,
final boolean dynamicScalingEnabled, final Long diskOfferingId, final boolean diskOfferingStrictness,
final boolean isCustomized, final boolean encryptRoot, final boolean purgeResources) {
// Filter child domains when both parent and child domains are present
List<Long> filteredDomainIds = filterChildSubDomains(domainIds);
@ -3234,7 +3235,7 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
limitResourceUse, volatileVm, displayText, isSystem, vmType,
hostTag, deploymentPlanner, dynamicScalingEnabled, isCustomized);
List<ServiceOfferingDetailsVO> detailsVO = new ArrayList<ServiceOfferingDetailsVO>();
List<ServiceOfferingDetailsVO> detailsVOList = new ArrayList<ServiceOfferingDetailsVO>();
if (details != null) {
// To have correct input, either both gpu card name and VGPU type should be passed or nothing should be passed.
// Use XOR condition to verify that.
@ -3268,12 +3269,16 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
// Add in disk offering details
continue;
}
detailsVO.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), detailEntry.getKey(), detailEntryValue, true));
detailsVOList.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), detailEntry.getKey(), detailEntryValue, true));
}
}
if (storagePolicyID != null) {
detailsVO.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.STORAGE_POLICY, String.valueOf(storagePolicyID), false));
detailsVOList.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.STORAGE_POLICY, String.valueOf(storagePolicyID), false));
}
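// record the purge-resources choice as an offering detail keyed by ServiceOffering.PURGE_DB_ENTITIES_KEY so it can be read back later (e.g. when building the offering response)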
if (purgeResources) {
detailsVOList.add(new ServiceOfferingDetailsVO(serviceOffering.getId(),
ServiceOffering.PURGE_DB_ENTITIES_KEY, Boolean.TRUE.toString(), false));
}
serviceOffering.setDiskOfferingStrictness(diskOfferingStrictness);
@ -3303,18 +3308,18 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
if ((serviceOffering = _serviceOfferingDao.persist(serviceOffering)) != null) {
for (Long domainId : filteredDomainIds) {
detailsVO.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.DOMAIN_ID, String.valueOf(domainId), false));
detailsVOList.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.DOMAIN_ID, String.valueOf(domainId), false));
}
if (CollectionUtils.isNotEmpty(zoneIds)) {
for (Long zoneId : zoneIds) {
detailsVO.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.ZONE_ID, String.valueOf(zoneId), false));
detailsVOList.add(new ServiceOfferingDetailsVO(serviceOffering.getId(), ApiConstants.ZONE_ID, String.valueOf(zoneId), false));
}
}
if (CollectionUtils.isNotEmpty(detailsVO)) {
for (ServiceOfferingDetailsVO detail : detailsVO) {
if (CollectionUtils.isNotEmpty(detailsVOList)) {
for (ServiceOfferingDetailsVO detail : detailsVOList) {
detail.setResourceId(serviceOffering.getId());
}
_serviceOfferingDetailsDao.saveDetails(detailsVO);
_serviceOfferingDetailsDao.saveDetails(detailsVOList);
}
CallContext.current().setEventDetails("Service offering id=" + serviceOffering.getId());
@ -3474,6 +3479,7 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
String storageTags = cmd.getStorageTags();
String hostTags = cmd.getHostTags();
ServiceOffering.State state = cmd.getState();
boolean purgeResources = cmd.isPurgeResources();
if (userId == null) {
userId = Long.valueOf(User.UID_SYSTEM);
@ -3491,6 +3497,12 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
List<Long> existingZoneIds = _serviceOfferingDetailsDao.findZoneIds(id);
Collections.sort(existingZoneIds);
String purgeResourceStr = _serviceOfferingDetailsDao.getDetail(id, ServiceOffering.PURGE_DB_ENTITIES_KEY);
boolean existingPurgeResources = false;
if (StringUtils.isNotBlank(purgeResourceStr)) {
existingPurgeResources = Boolean.parseBoolean(purgeResourceStr);
}
// check if valid domain
if (CollectionUtils.isNotEmpty(domainIds)) {
for (final Long domainId: domainIds) {
@ -3559,7 +3571,8 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
}
final boolean updateNeeded = name != null || displayText != null || sortKey != null || storageTags != null || hostTags != null || state != null;
final boolean detailsUpdateNeeded = !filteredDomainIds.equals(existingDomainIds) || !filteredZoneIds.equals(existingZoneIds);
final boolean detailsUpdateNeeded = !filteredDomainIds.equals(existingDomainIds) ||
!filteredZoneIds.equals(existingZoneIds) || purgeResources != existingPurgeResources;
if (!updateNeeded && !detailsUpdateNeeded) {
return _serviceOfferingDao.findById(id);
}
@ -3618,6 +3631,14 @@ public class ConfigurationManagerImpl extends ManagerBase implements Configurati
detailsVO.add(new ServiceOfferingDetailsVO(id, ApiConstants.ZONE_ID, String.valueOf(zoneId), false));
}
}
if (purgeResources != existingPurgeResources) {
sc.setParameters("detailName", ServiceOffering.PURGE_DB_ENTITIES_KEY);
_serviceOfferingDetailsDao.remove(sc);
if (purgeResources) {
detailsVO.add(new ServiceOfferingDetailsVO(id, ServiceOffering.PURGE_DB_ENTITIES_KEY,
"true", false));
}
}
}
if (!detailsVO.isEmpty()) {
for (ServiceOfferingDetailsVO detailVO : detailsVO) {

View File

@ -1092,4 +1092,9 @@ public class HighAvailabilityManagerImpl extends ManagerBase implements Configur
StopRetryInterval, RestartRetryInterval, MigrateRetryInterval, InvestigateRetryInterval,
HAWorkers, ForceHA, KvmHAFenceHostIfHeartbeatFailsOnStorage};
}
@Override
public int expungeWorkItemsByVmList(List<Long> vmIds, Long batchSize) {
return _haDao.expungeByVmList(vmIds, batchSize);
}
}

View File

@ -85,4 +85,5 @@ public interface HighAvailabilityDao extends GenericDao<HaWorkVO, Long> {
List<HaWorkVO> listPendingHaWorkForVm(long vmId);
List<HaWorkVO> listPendingMigrationsForVm(long vmId);
int expungeByVmList(List<Long> vmIds, Long batchSize);
}

View File

@ -19,7 +19,7 @@ package com.cloud.ha.dao;
import java.util.Date;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import org.springframework.stereotype.Component;
import com.cloud.ha.HaWorkVO;
@ -258,4 +258,16 @@ public class HighAvailabilityDaoImpl extends GenericDaoBase<HaWorkVO, Long> impl
return update(vo, sc);
}
@Override
public int expungeByVmList(List<Long> vmIds, Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
SearchBuilder<HaWorkVO> sb = createSearchBuilder();
sb.and("vmIds", sb.entity().getInstanceId(), SearchCriteria.Op.IN);
SearchCriteria<HaWorkVO> sc = sb.create();
sc.setParameters("vmIds", vmIds.toArray());
return batchExpunge(sc, batchSize);
}
}
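A minimal usage sketch (illustrative only, assuming an injected HighAvailabilityManager): during VM purging, the cleanup path clears pending HA work rows for a batch of already-expunged VMs through the new manager method, which delegates to the IN-query-plus-batchExpunge pattern implemented above.

void purgeHaWorkForVms(HighAvailabilityManager highAvailabilityManager) {
List<Long> vmIds = List.of(101L, 102L, 103L); // IDs of VMs whose DB rows are being purged
Long batchSize = 50L; // rows expunged per round trip
highAvailabilityManager.expungeWorkItemsByVmList(vmIds, batchSize);
}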

View File

@ -54,11 +54,6 @@ import javax.naming.ConfigurationException;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.ParserConfigurationException;
import com.cloud.kubernetes.cluster.KubernetesServiceHelper;
import com.cloud.network.dao.NsxProviderDao;
import com.cloud.network.element.NsxProviderVO;
import com.cloud.user.AccountVO;
import com.cloud.utils.exception.ExceptionProxyObject;
import org.apache.cloudstack.acl.ControlledEntity;
import org.apache.cloudstack.acl.ControlledEntity.ACLType;
import org.apache.cloudstack.acl.SecurityChecker.AccessType;
@ -247,6 +242,7 @@ import com.cloud.host.dao.HostDao;
import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
import com.cloud.hypervisor.kvm.dpdk.DpdkHelper;
import com.cloud.kubernetes.cluster.KubernetesServiceHelper;
import com.cloud.network.IpAddressManager;
import com.cloud.network.Network;
import com.cloud.network.Network.GuestType;
@ -265,7 +261,9 @@ import com.cloud.network.dao.LoadBalancerVMMapVO;
import com.cloud.network.dao.NetworkDao;
import com.cloud.network.dao.NetworkServiceMapDao;
import com.cloud.network.dao.NetworkVO;
import com.cloud.network.dao.NsxProviderDao;
import com.cloud.network.dao.PhysicalNetworkDao;
import com.cloud.network.element.NsxProviderVO;
import com.cloud.network.element.UserDataServiceProvider;
import com.cloud.network.guru.NetworkGuru;
import com.cloud.network.lb.LoadBalancingRulesManager;
@ -334,6 +332,7 @@ import com.cloud.template.VirtualMachineTemplate;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.AccountService;
import com.cloud.user.AccountVO;
import com.cloud.user.ResourceLimitService;
import com.cloud.user.SSHKeyPairVO;
import com.cloud.user.User;
@ -368,6 +367,7 @@ import com.cloud.utils.db.TransactionCallbackWithExceptionNoReturn;
import com.cloud.utils.db.TransactionStatus;
import com.cloud.utils.db.UUIDManager;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.exception.ExceptionProxyObject;
import com.cloud.utils.exception.ExecutionException;
import com.cloud.utils.fsm.NoTransitionException;
import com.cloud.utils.net.Ip;

View File

@ -0,0 +1,827 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.resource;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import javax.inject.Inject;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.command.admin.resource.PurgeExpungedResourcesCmd;
import org.apache.cloudstack.engine.orchestration.service.NetworkOrchestrationService;
import org.apache.cloudstack.framework.async.AsyncCallFuture;
import org.apache.cloudstack.framework.async.AsyncCallbackDispatcher;
import org.apache.cloudstack.framework.async.AsyncCompletionCallback;
import org.apache.cloudstack.framework.async.AsyncRpcContext;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.Configurable;
import org.apache.cloudstack.framework.jobs.dao.VmWorkJobDao;
import org.apache.cloudstack.managed.context.ManagedContextRunnable;
import org.apache.cloudstack.storage.command.CommandResult;
import org.apache.cloudstack.storage.datastore.db.SnapshotDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao;
import org.apache.cloudstack.utils.identity.ManagementServerNode;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.EnumUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import com.cloud.cluster.ManagementServerHostVO;
import com.cloud.cluster.dao.ManagementServerHostDao;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.ha.HighAvailabilityManager;
import com.cloud.network.as.dao.AutoScaleVmGroupVmMapDao;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.dao.InlineLoadBalancerNicMapDao;
import com.cloud.network.dao.LoadBalancerVMMapDao;
import com.cloud.network.dao.OpRouterMonitorServiceDao;
import com.cloud.network.rules.dao.PortForwardingRulesDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.secstorage.CommandExecLogDao;
import com.cloud.service.dao.ServiceOfferingDetailsDao;
import com.cloud.storage.SnapshotVO;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.SnapshotDao;
import com.cloud.storage.dao.SnapshotDetailsDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.dao.VolumeDetailsDao;
import com.cloud.utils.Pair;
import com.cloud.utils.component.ManagerBase;
import com.cloud.utils.component.PluggableService;
import com.cloud.utils.concurrency.NamedThreadFactory;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
import com.cloud.utils.db.TransactionCallbackWithException;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.vm.ItWorkDao;
import com.cloud.vm.NicVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.dao.ConsoleSessionDao;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.NicDetailsDao;
import com.cloud.vm.dao.NicExtraDhcpOptionDao;
import com.cloud.vm.dao.NicSecondaryIpDao;
import com.cloud.vm.dao.UserVmDetailsDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.vm.snapshot.VMSnapshotVO;
import com.cloud.vm.snapshot.dao.VMSnapshotDao;
import com.cloud.vm.snapshot.dao.VMSnapshotDetailsDao;
public class ResourceCleanupServiceImpl extends ManagerBase implements ResourceCleanupService, PluggableService,
Configurable {
@Inject
VMInstanceDao vmInstanceDao;
@Inject
VolumeDao volumeDao;
@Inject
VolumeDetailsDao volumeDetailsDao;
@Inject
VolumeDataStoreDao volumeDataStoreDao;
@Inject
SnapshotDao snapshotDao;
@Inject
SnapshotDetailsDao snapshotDetailsDao;
@Inject
SnapshotDataStoreDao snapshotDataStoreDao;
@Inject
NicDao nicDao;
@Inject
NicDetailsDao nicDetailsDao;
@Inject
NicExtraDhcpOptionDao nicExtraDhcpOptionDao;
@Inject
InlineLoadBalancerNicMapDao inlineLoadBalancerNicMapDao;
@Inject
UserVmDetailsDao userVmDetailsDao;
@Inject
VMSnapshotDao vmSnapshotDao;
@Inject
VMSnapshotDetailsDao vmSnapshotDetailsDao;
@Inject
AutoScaleVmGroupVmMapDao autoScaleVmGroupVmMapDao;
@Inject
CommandExecLogDao commandExecLogDao;
@Inject
NetworkOrchestrationService networkOrchestrationService;
@Inject
LoadBalancerVMMapDao loadBalancerVMMapDao;
@Inject
NicSecondaryIpDao nicSecondaryIpDao;
@Inject
HighAvailabilityManager highAvailabilityManager;
@Inject
ItWorkDao itWorkDao;
@Inject
OpRouterMonitorServiceDao opRouterMonitorServiceDao;
@Inject
PortForwardingRulesDao portForwardingRulesDao;
@Inject
IPAddressDao ipAddressDao;
@Inject
VmWorkJobDao vmWorkJobDao;
@Inject
ConsoleSessionDao consoleSessionDao;
@Inject
ManagementServerHostDao managementServerHostDao;
@Inject
ServiceOfferingDetailsDao serviceOfferingDetailsDao;
private ScheduledExecutorService expungedResourcesCleanupExecutor;
private ExecutorService purgeExpungedResourcesJobExecutor;
protected void purgeLinkedSnapshotEntities(final List<Long> snapshotIds, final Long batchSize) {
if (CollectionUtils.isEmpty(snapshotIds)) {
return;
}
snapshotDetailsDao.batchExpungeForResources(snapshotIds, batchSize);
snapshotDataStoreDao.expungeBySnapshotList(snapshotIds, batchSize);
// Snapshot policy rows are cleaned up via ON DELETE CASCADE

}
protected long purgeVolumeSnapshots(final List<Long> volumeIds, final Long batchSize) {
if (CollectionUtils.isEmpty(volumeIds)) {
return 0;
}
SearchBuilder<SnapshotVO> sb = snapshotDao.createSearchBuilder();
sb.and("volumeIds", sb.entity().getVolumeId(), SearchCriteria.Op.IN);
sb.and("removed", sb.entity().getRemoved(), SearchCriteria.Op.NNULL);
SearchCriteria<SnapshotVO> sc = sb.create();
sc.setParameters("volumeIds", volumeIds.toArray());
int removed = 0;
long totalRemoved = 0;
Filter filter = new Filter(SnapshotVO.class, "id", true, 0L, batchSize);
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
do {
List<SnapshotVO> snapshots = snapshotDao.searchIncludingRemoved(sc, filter, null, false);
List<Long> snapshotIds = snapshots.stream().map(SnapshotVO::getId).collect(Collectors.toList());
purgeLinkedSnapshotEntities(snapshotIds, batchSize);
removed = snapshotDao.expungeList(snapshotIds);
totalRemoved += removed;
} while (batchSizeFinal > 0 && removed >= batchSizeFinal);
return totalRemoved;
}
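// Illustrative sketch only (not called by the service): the batched do-while pattern used by the
// purge methods in this class. With a batch size of 50 and 120 removed rows it runs three
// iterations (50, 50, 20) and stops once an iteration removes fewer rows than the batch size; a
// zero batch size (the null case upstream) means a single unbounded pass.
private static long illustrateBatchedPurge(java.util.function.LongSupplier expungeOneBatch, long batchSize) {
long removed;
long totalRemoved = 0;
do {
removed = expungeOneBatch.getAsLong(); // assumed to expunge at most batchSize rows per call
totalRemoved += removed;
} while (batchSize > 0 && removed >= batchSize);
return totalRemoved;
}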
protected void purgeLinkedVolumeEntities(final List<Long> volumeIds, final Long batchSize) {
if (CollectionUtils.isEmpty(volumeIds)) {
return;
}
volumeDetailsDao.batchExpungeForResources(volumeIds, batchSize);
volumeDataStoreDao.expungeByVolumeList(volumeIds, batchSize);
purgeVolumeSnapshots(volumeIds, batchSize);
}
protected long purgeVMVolumes(final List<Long> vmIds, final Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
int removed = 0;
long totalRemoved = 0;
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
do {
List<VolumeVO> volumes = volumeDao.searchRemovedByVms(vmIds, batchSize);
List<Long> volumeIds = volumes.stream().map(VolumeVO::getId).collect(Collectors.toList());
purgeLinkedVolumeEntities(volumeIds, batchSize);
removed = volumeDao.expungeList(volumeIds);
totalRemoved += removed;
} while (batchSizeFinal > 0 && removed >= batchSizeFinal);
return totalRemoved;
}
protected void purgeLinkedNicEntities(final List<Long> nicIds, final Long batchSize) {
if (CollectionUtils.isEmpty(nicIds)) {
return;
}
nicDetailsDao.batchExpungeForResources(nicIds, batchSize);
nicExtraDhcpOptionDao.expungeByNicList(nicIds, batchSize);
inlineLoadBalancerNicMapDao.expungeByNicList(nicIds, batchSize);
}
protected long purgeVMNics(final List<Long> vmIds, final Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
int removed = 0;
long totalRemoved = 0;
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
do {
List<NicVO> nics = nicDao.searchRemovedByVms(vmIds, batchSize);
List<Long> nicIds = nics.stream().map(NicVO::getId).collect(Collectors.toList());
purgeLinkedNicEntities(nicIds, batchSize);
removed = nicDao.expungeList(nicIds);
totalRemoved += removed;
} while (batchSizeFinal > 0 && removed >= batchSizeFinal);
return totalRemoved;
}
protected long purgeVMSnapshots(final List<Long> vmIds, final Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return 0;
}
int removed = 0;
long totalRemoved = 0;
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
do {
List<VMSnapshotVO> vmSnapshots = vmSnapshotDao.searchRemovedByVms(vmIds, batchSize);
List<Long> ids = vmSnapshots.stream().map(VMSnapshotVO::getId).collect(Collectors.toList());
vmSnapshotDetailsDao.batchExpungeForResources(ids, batchSize);
removed = vmSnapshotDao.expungeList(ids);
totalRemoved += removed;
} while (batchSizeFinal > 0 && removed >= batchSizeFinal);
return totalRemoved;
}
protected void purgeLinkedVMEntities(final List<Long> vmIds, final Long batchSize) {
if (CollectionUtils.isEmpty(vmIds)) {
return;
}
purgeVMVolumes(vmIds, batchSize);
purgeVMNics(vmIds, batchSize);
userVmDetailsDao.batchExpungeForResources(vmIds, batchSize);
purgeVMSnapshots(vmIds, batchSize);
autoScaleVmGroupVmMapDao.expungeByVmList(vmIds, batchSize);
commandExecLogDao.expungeByVmList(vmIds, batchSize);
networkOrchestrationService.expungeLbVmRefs(vmIds, batchSize);
loadBalancerVMMapDao.expungeByVmList(vmIds, batchSize);
nicSecondaryIpDao.expungeByVmList(vmIds, batchSize);
highAvailabilityManager.expungeWorkItemsByVmList(vmIds, batchSize);
itWorkDao.expungeByVmList(vmIds, batchSize);
opRouterMonitorServiceDao.expungeByVmList(vmIds, batchSize);
portForwardingRulesDao.expungeByVmList(vmIds, batchSize);
ipAddressDao.expungeByVmList(vmIds, batchSize);
vmWorkJobDao.expungeByVmList(vmIds, batchSize);
consoleSessionDao.expungeByVmList(vmIds, batchSize);
}
protected HashSet<Long> getVmIdsWithActiveVolumeSnapshots(List<Long> vmIds) {
if (CollectionUtils.isEmpty(vmIds)) {
return new HashSet<>();
}
List<VolumeVO> volumes = volumeDao.searchRemovedByVms(vmIds, null);
List<Long> volumeIds = volumes.stream().map(VolumeVO::getId).collect(Collectors.toList());
List<SnapshotVO> activeSnapshots = snapshotDao.searchByVolumes(volumeIds);
HashSet<Long> activeSnapshotVolumeIds =
activeSnapshots.stream().map(SnapshotVO::getVolumeId).collect(Collectors.toCollection(HashSet::new));
List<VolumeVO> volumesWithActiveSnapshots =
volumes.stream().filter(v -> activeSnapshotVolumeIds.contains(v.getId())).collect(Collectors.toList());
return volumesWithActiveSnapshots.stream().map(VolumeVO::getInstanceId)
.collect(Collectors.toCollection(HashSet::new));
}
protected Pair<List<Long>, List<Long>> getFilteredVmIdsForSnapshots(List<Long> vmIds) {
HashSet<Long> currentSkippedVmIds = new HashSet<>();
List<VMSnapshotVO> activeSnapshots = vmSnapshotDao.searchByVms(vmIds);
if (CollectionUtils.isNotEmpty(activeSnapshots)) {
HashSet<Long> vmIdsWithActiveSnapshots = activeSnapshots.stream().map(VMSnapshotVO::getVmId)
.collect(Collectors.toCollection(HashSet::new));
if (logger.isDebugEnabled()) {
logger.debug(String.format("Skipping purging VMs with IDs %s as they have active " +
"VM snapshots", StringUtils.join(vmIdsWithActiveSnapshots)));
}
currentSkippedVmIds.addAll(vmIdsWithActiveSnapshots);
}
HashSet<Long> vmIdsWithActiveVolumeSnapshots = getVmIdsWithActiveVolumeSnapshots(vmIds);
if (CollectionUtils.isNotEmpty(vmIdsWithActiveVolumeSnapshots)) {
if (logger.isDebugEnabled()) {
logger.debug(String.format("Skipping purging VMs with IDs %s as they have volumes with active " +
"snapshots", StringUtils.join(vmIdsWithActiveVolumeSnapshots)));
}
currentSkippedVmIds.addAll(vmIdsWithActiveVolumeSnapshots);
}
if (CollectionUtils.isNotEmpty(currentSkippedVmIds)) {
vmIds.removeAll(currentSkippedVmIds);
}
return new Pair<>(vmIds, new ArrayList<>(currentSkippedVmIds));
}
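// Illustrative sketch only (not called by the service): the partitioning performed by
// getFilteredVmIdsForSnapshots(). VMs that still have active VM snapshots, or volumes with active
// snapshots, are skipped so their rows are not purged while other entities still reference them.
// Like the real method, this mutates the passed-in list.
private static Pair<List<Long>, List<Long>> illustrateSnapshotFiltering(List<Long> vmIds, HashSet<Long> blockedVmIds) {
List<Long> skipped = vmIds.stream().filter(blockedVmIds::contains).collect(Collectors.toList());
vmIds.removeAll(skipped);
return new Pair<>(vmIds, skipped);
}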
protected Pair<List<Long>, List<Long>> getVmIdsWithNoActiveSnapshots(final Date startDate, final Date endDate,
final Long batchSize, final List<Long> skippedVmIds) {
List<VMInstanceVO> vms = vmInstanceDao.searchRemovedByRemoveDate(startDate, endDate, batchSize, skippedVmIds);
if (CollectionUtils.isEmpty(vms)) {
return new Pair<>(new ArrayList<>(), new ArrayList<>());
}
List<Long> vmIds = vms.stream().map(VMInstanceVO::getId).collect(Collectors.toList());
return getFilteredVmIdsForSnapshots(vmIds);
}
protected long purgeVMEntities(final Long batchSize, final Date startDate, final Date endDate) {
return Transaction.execute((TransactionCallbackWithException<Long, CloudRuntimeException>) status -> {
int count;
long totalRemoved = 0;
final long batchSizeFinal = ObjectUtils.defaultIfNull(batchSize, 0L);
List<Long> skippedVmIds = new ArrayList<>();
do {
Pair<List<Long>, List<Long>> allVmIds =
getVmIdsWithNoActiveSnapshots(startDate, endDate, batchSize, skippedVmIds);
List<Long> vmIds = allVmIds.first();
List<Long> currentSkippedVmIds = allVmIds.second();
count = vmIds.size() + currentSkippedVmIds.size();
skippedVmIds.addAll(currentSkippedVmIds);
purgeLinkedVMEntities(vmIds, batchSize);
totalRemoved += vmInstanceDao.expungeList(vmIds);
} while (batchSizeFinal > 0 && count >= batchSizeFinal);
if (logger.isTraceEnabled()) {
logger.trace(String.format("Purged total %d VM records", totalRemoved));
}
return totalRemoved;
});
}
protected boolean purgeVMEntity(final long vmId) {
return Transaction.execute((TransactionCallbackWithException<Boolean, CloudRuntimeException>) status -> {
final Long batchSize = ExpungedResourcesPurgeBatchSize.value().longValue();
List<Long> vmIds = new ArrayList<>();
vmIds.add(vmId);
Pair<List<Long>, List<Long>> allVmIds = getFilteredVmIdsForSnapshots(vmIds);
if (CollectionUtils.isEmpty(allVmIds.first())) {
return false;
}
purgeLinkedVMEntities(vmIds, batchSize);
return vmInstanceDao.expunge(vmId);
});
}
protected long purgeEntities(final List<ResourceType> resourceTypes, final Long batchSize,
final Date startDate, final Date endDate) {
if (logger.isTraceEnabled()) {
logger.trace(String.format("Expunging entities with parameters - resourceType: %s, batchSize: %d, " +
"startDate: %s, endDate: %s", StringUtils.join(resourceTypes), batchSize, startDate, endDate));
}
long totalPurged = 0;
if (CollectionUtils.isEmpty(resourceTypes) || resourceTypes.contains(ResourceType.VirtualMachine)) {
totalPurged += purgeVMEntities(batchSize, startDate, endDate);
}
return totalPurged;
}
protected Void purgeExpungedResourcesCallback(
AsyncCallbackDispatcher<ResourceCleanupServiceImpl, PurgeExpungedResourcesResult> callback,
PurgeExpungedResourcesContext<PurgeExpungedResourcesResult> context) {
PurgeExpungedResourcesResult result = callback.getResult();
context.future.complete(result);
return null;
}
protected ResourceType getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(final String resourceTypeStr,
final Date startDate, final Date endDate, final Long batchSize) {
ResourceType resourceType = null;
if (StringUtils.isNotBlank(resourceTypeStr)) {
resourceType = EnumUtils.getEnumIgnoreCase(ResourceType.class, resourceTypeStr, null);
if (resourceType == null) {
throw new InvalidParameterValueException("Invalid resource type specified");
}
}
if (batchSize != null && batchSize <= 0) {
throw new InvalidParameterValueException(String.format("Invalid %s specified", ApiConstants.BATCH_SIZE));
}
if (endDate != null && startDate != null && endDate.before(startDate)) {
throw new InvalidParameterValueException(String.format("Invalid %s specified", ApiConstants.END_DATE));
}
return resourceType;
}
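// Illustrative sketch only: a parameter combination accepted by the validation above. The resource
// type is currently limited to VirtualMachine, the batch size must be positive when given, and the
// end date may not precede the start date; all parameters are optional.
private ResourceType illustrateValidParams() {
return getResourceTypeAndValidatePurgeExpungedResourcesCmdParams("VirtualMachine", null, null, 50L);
}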
protected long purgeExpungedResourceUsingJob(final ResourceType resourceType, final Long batchSize,
final Date startDate, final Date endDate) {
AsyncCallFuture<PurgeExpungedResourcesResult> future = new AsyncCallFuture<>();
PurgeExpungedResourcesContext<PurgeExpungedResourcesResult> context =
new PurgeExpungedResourcesContext<>(null, future);
AsyncCallbackDispatcher<ResourceCleanupServiceImpl, PurgeExpungedResourcesResult> caller =
AsyncCallbackDispatcher.create(this);
caller.setCallback(caller.getTarget().purgeExpungedResourcesCallback(null, null))
.setContext(context);
PurgeExpungedResourceThread job = new PurgeExpungedResourceThread(resourceType, batchSize, startDate, endDate,
caller);
purgeExpungedResourcesJobExecutor.submit(job);
long expungedCount;
try {
PurgeExpungedResourcesResult result = future.get();
if (result.isFailed()) {
throw new CloudRuntimeException(String.format("Failed to purge expunged resources due to: %s", result.getResult()));
}
expungedCount = result.getPurgedCount();
} catch (InterruptedException | ExecutionException e) {
logger.error(String.format("Failed to purge expunged resources due to: %s", e.getMessage()), e);
throw new CloudRuntimeException("Failed to purge expunged resources");
}
return expungedCount;
}
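// Illustrative sketch only: the submit-and-wait shape of purgeExpungedResourceUsingJob() expressed
// with a plain JDK future. The real method goes through AsyncCallFuture/AsyncCallbackDispatcher so
// the worker thread can complete the caller's future with a PurgeExpungedResourcesResult.
private static long illustrateSubmitAndWait(ExecutorService executor) throws ExecutionException, InterruptedException {
java.util.concurrent.Future<Long> purgedCount = executor.submit(() -> 0L); // worker returns the purged count
return purgedCount.get(); // caller blocks until the purge job finishes
}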
protected boolean isVmOfferingPurgeResourcesEnabled(long vmServiceOfferingId) {
String detail =
serviceOfferingDetailsDao.getDetail(vmServiceOfferingId, ServiceOffering.PURGE_DB_ENTITIES_KEY);
return StringUtils.isNotBlank(detail) && Boolean.parseBoolean(detail);
}
protected boolean purgeExpungedResource(long resourceId, ResourceType resourceType) {
if (!ResourceType.VirtualMachine.equals(resourceType)) {
return false;
}
return purgeVMEntity(resourceId);
}
protected void purgeExpungedResourceLater(long resourceId, ResourceType resourceType) {
AsyncCallFuture<PurgeExpungedResourcesResult> future = new AsyncCallFuture<>();
PurgeExpungedResourcesContext<PurgeExpungedResourcesResult> context =
new PurgeExpungedResourcesContext<>(null, future);
AsyncCallbackDispatcher<ResourceCleanupServiceImpl, PurgeExpungedResourcesResult> caller =
AsyncCallbackDispatcher.create(this);
caller.setCallback(caller.getTarget().purgeExpungedResourcesCallback(null, null))
.setContext(context);
PurgeExpungedResourceThread job = new PurgeExpungedResourceThread(resourceId, resourceType, caller);
purgeExpungedResourcesJobExecutor.submit(job);
}
protected Date parseDateFromConfig(String configKey, String configValue) {
if (StringUtils.isBlank(configValue)) {
return null;
}
final List<String> dateFormats = List.of("yyyy-MM-dd HH:mm:ss", "yyyy-MM-dd");
Date date = null;
for (String format : dateFormats) {
final SimpleDateFormat dateFormat = new SimpleDateFormat(format);
try {
date = dateFormat.parse(configValue);
break;
} catch (ParseException e) {
logger.trace(String.format("Unable to parse value for config %s: %s with date " +
"format: %s due to %s", configKey, configValue, format, e.getMessage()));
}
}
if (date == null) {
throw new CloudRuntimeException(String.format("Unable to parse value for config %s: %s with date " +
"formats: %s", configKey, configValue, StringUtils.join(dateFormats)));
}
return date;
}
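// Illustrative sketch only: the two formats accepted for the purge start-time setting. A value such
// as "2024-05-01 02:30:00" parses with yyyy-MM-dd HH:mm:ss, while "2024-05-01" falls back to
// yyyy-MM-dd; anything else makes parseDateFromConfig() throw a CloudRuntimeException.
private static Date illustrateStartTimeParsing(String configValue) throws ParseException {
try {
return new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").parse(configValue);
} catch (ParseException e) {
return new SimpleDateFormat("yyyy-MM-dd").parse(configValue);
}
}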
protected Date getStartDateFromConfig() {
return parseDateFromConfig(ExpungedResourcesPurgeStartTime.key(), ExpungedResourcesPurgeStartTime.value());
}
protected Date calculatePastDateFromConfig(String configKey, Integer configValue) {
if (configValue == null || configValue == 0) {
return null;
}
if (configValue < 0) {
throw new CloudRuntimeException(String.format("Unable to retrieve a valid value for config %s: %s",
configKey, configValue));
}
Calendar cal = Calendar.getInstance();
Date endDate = new Date();
cal.setTime(endDate);
cal.add(Calendar.DATE, -1 * configValue);
return cal.getTime();
}
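// Illustrative sketch only: with the keep-past-days setting at 30, the computed end date is
// "now minus 30 days", so resources expunged within the last 30 days are left untouched by the
// background task; a value of 0 disables the cut-off entirely.
private static Date illustrateKeepPastDays(int keepPastDays) {
Calendar cal = Calendar.getInstance();
cal.add(Calendar.DATE, -keepPastDays);
return cal.getTime();
}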
protected Date getEndDateFromConfig() {
return calculatePastDateFromConfig(ExpungedResourcesPurgeKeepPastDays.key(),
ExpungedResourcesPurgeKeepPastDays.value());
}
protected List<ResourceType> getResourceTypesFromConfig() {
String resourceTypesConfig = ExpungedResourcePurgeResources.value();
if (StringUtils.isBlank(resourceTypesConfig)) {
return null;
}
List<ResourceType> resourceTypes = new ArrayList<>();
for (String type : resourceTypesConfig.split(",")) {
ResourceType resourceType = EnumUtils.getEnum(ResourceType.class, type.trim(), null);
if (resourceType == null) {
throw new CloudRuntimeException(String.format("Invalid resource type: '%s' specified in " +
"the config: %s", type, ExpungedResourcePurgeResources.key()));
}
resourceTypes.add(resourceType);
}
return resourceTypes;
}
protected long getBatchSizeFromConfig() {
Integer batchSize = ExpungedResourcesPurgeBatchSize.value();
if (batchSize == null || batchSize <= 0) {
throw new CloudRuntimeException(String.format("Unable to retrieve a valid value for config %s: %s",
ExpungedResourcesPurgeBatchSize.key(), batchSize));
}
return batchSize.longValue();
}
@Override
public long purgeExpungedResources(PurgeExpungedResourcesCmd cmd) {
final String resourceTypeStr = cmd.getResourceType();
final Date startDate = cmd.getStartDate();
final Date endDate = cmd.getEndDate();
Long batchSize = cmd.getBatchSize();
ResourceType resourceType = getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(resourceTypeStr,
startDate, endDate, batchSize);
Integer globalBatchSize = ExpungedResourcesPurgeBatchSize.value();
if (batchSize == null && globalBatchSize > 0) {
batchSize = globalBatchSize.longValue();
}
long expungedCount = purgeExpungedResourceUsingJob(resourceType, batchSize, startDate, endDate);
if (expungedCount <= 0) {
logger.debug("No resource expunged during purgeExpungedResources execution");
}
return expungedCount;
}
@Override
public void purgeExpungedVmResourcesLaterIfNeeded(VirtualMachine vm) {
if (!isVmOfferingPurgeResourcesEnabled(vm.getServiceOfferingId())) {
return;
}
purgeExpungedResourceLater(vm.getId(), ResourceType.VirtualMachine);
}
@Override
public boolean start() {
if (Boolean.TRUE.equals(ExpungedResourcePurgeEnabled.value())) {
expungedResourcesCleanupExecutor = new ScheduledThreadPoolExecutor(1,
new NamedThreadFactory("ExpungedResourceCleanupWorker"));
expungedResourcesCleanupExecutor.scheduleWithFixedDelay(new ExpungedResourceCleanupWorker(),
ExpungedResourcesPurgeDelay.value(), ExpungedResourcesPurgeInterval.value(), TimeUnit.SECONDS);
}
purgeExpungedResourcesJobExecutor = Executors.newFixedThreadPool(3,
new NamedThreadFactory("Purge-Expunged-Resources-Job-Executor"));
return true;
}
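// Illustrative sketch only (not used by start() above): the scheduling pattern with example values
// matching the setting defaults, a 300 second initial delay and a daily (86400 second) interval.
private static ScheduledExecutorService illustrateScheduling(Runnable cleanupWorker) {
ScheduledExecutorService executor = new ScheduledThreadPoolExecutor(1,
new NamedThreadFactory("ExpungedResourceCleanupWorker-Example"));
executor.scheduleWithFixedDelay(cleanupWorker, 300L, 86400L, TimeUnit.SECONDS);
return executor;
}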
@Override
public boolean stop() {
purgeExpungedResourcesJobExecutor.shutdown();
if (expungedResourcesCleanupExecutor != null) {
expungedResourcesCleanupExecutor.shutdownNow();
}
return true;
}
@Override
public List<Class<?>> getCommands() {
final List<Class<?>> cmdList = new ArrayList<>();
cmdList.add(PurgeExpungedResourcesCmd.class);
return cmdList;
}
@Override
public String getConfigComponentName() {
return ResourceCleanupService.class.getName();
}
@Override
public ConfigKey<?>[] getConfigKeys() {
return new ConfigKey<?>[]{
ExpungedResourcePurgeEnabled,
ExpungedResourcePurgeResources,
ExpungedResourcesPurgeInterval,
ExpungedResourcesPurgeDelay,
ExpungedResourcesPurgeBatchSize,
ExpungedResourcesPurgeStartTime,
ExpungedResourcesPurgeKeepPastDays,
ExpungedResourcePurgeJobDelay
};
}
public class ExpungedResourceCleanupWorker extends ManagedContextRunnable {
@Override
protected void runInContext() {
GlobalLock gcLock = GlobalLock.getInternLock("Expunged.Resource.Cleanup.Lock");
try {
if (gcLock.lock(3)) {
try {
runCleanupForLongestRunningManagementServer();
} finally {
gcLock.unlock();
}
}
} finally {
gcLock.releaseRef();
}
}
protected void runCleanupForLongestRunningManagementServer() {
ManagementServerHostVO msHost = managementServerHostDao.findOneByLongestRuntime();
if (msHost == null || (msHost.getMsid() != ManagementServerNode.getManagementServerId())) {
logger.debug("Skipping the expunged resource cleanup task on this management server");
return;
}
reallyRun();
}
public void reallyRun() {
try {
Date startDate = getStartDateFromConfig();
Date endDate = getEndDateFromConfig();
List<ResourceType> resourceTypes = getResourceTypesFromConfig();
long batchSize = getBatchSizeFromConfig();
if (logger.isDebugEnabled()) {
logger.debug(String.format("Purging resources: %s as part of cleanup with start date: %s, " +
"end date: %s and batch size: %d", StringUtils.join(resourceTypes), startDate, endDate, batchSize));
}
purgeEntities(resourceTypes, batchSize, startDate, endDate);
} catch (Exception e) {
logger.warn("Caught exception while running expunged resources cleanup task: ", e);
}
}
}
protected class PurgeExpungedResourceThread extends ManagedContextRunnable {
ResourceType resourceType;
Long resourceId;
Long batchSize;
Date startDate;
Date endDate;
AsyncCompletionCallback<PurgeExpungedResourcesResult> callback;
long taskTimestamp;
public PurgeExpungedResourceThread(final ResourceType resourceType, final Long batchSize,
final Date startDate, final Date endDate,
AsyncCompletionCallback<PurgeExpungedResourcesResult> callback) {
this.resourceType = resourceType;
this.batchSize = batchSize;
this.startDate = startDate;
this.endDate = endDate;
this.callback = callback;
}
public PurgeExpungedResourceThread(final Long resourceId, final ResourceType resourceType,
AsyncCompletionCallback<PurgeExpungedResourcesResult> callback) {
this.resourceType = resourceType;
this.resourceId = resourceId;
this.callback = callback;
this.taskTimestamp = System.currentTimeMillis();
}
@Override
protected void runInContext() {
logger.trace(String.format("Executing purge for resource type: %s with batch size: %d start: %s, end: %s",
resourceType, batchSize, startDate, endDate));
reallyRun();
}
protected void waitForPurgeSingleResourceDelay(String resourceAsString) throws InterruptedException {
long jobDelayConfig = ExpungedResourcePurgeJobDelay.value();
if (jobDelayConfig < MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS) {
logger.debug(String.format("Value: %d for config: %s is lesser than the minimum value: %d, " +
"using minimum value",
jobDelayConfig,
ExpungedResourcePurgeJobDelay.key(),
MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS));
jobDelayConfig = MINIMUM_EXPUNGED_RESOURCE_PURGE_JOB_DELAY_IN_SECONDS;
}
long delay = (jobDelayConfig * 1000) -
(System.currentTimeMillis() - taskTimestamp);
if (delay > 0) {
if (logger.isTraceEnabled()) {
logger.trace(String.format("Waiting for %d before purging %s", delay, resourceAsString));
}
Thread.sleep(delay);
}
}
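// Illustrative sketch only: the remaining-wait computation above. With the job-delay setting at its
// default of 180 seconds, a resource expunged at taskTimestamp is purged roughly 180 seconds later;
// if the executor picks the job up late, the remaining wait shrinks and can reach zero.
private long illustrateRemainingDelayMillis(long jobDelaySeconds) {
return Math.max(0L, (jobDelaySeconds * 1000) - (System.currentTimeMillis() - taskTimestamp));
}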
protected void purgeSingleResource() {
String resourceAsString = String.format("resource [type: %s, ID: %d]", resourceType, resourceId);
try {
waitForPurgeSingleResourceDelay(resourceAsString);
if (!purgeExpungedResource(resourceId, resourceType)) {
throw new CloudRuntimeException(String.format("Failed to purge %s", resourceAsString));
}
if (logger.isDebugEnabled()) {
logger.info(String.format("Purged %s", resourceAsString));
}
callback.complete(new PurgeExpungedResourcesResult(resourceId, resourceType, null));
} catch (CloudRuntimeException e) {
logger.error(String.format("Caught exception while purging %s: ", resourceAsString), e);
callback.complete(new PurgeExpungedResourcesResult(resourceId, resourceType, e.getMessage()));
} catch (InterruptedException e) {
logger.error(String.format("Caught exception while waiting for purging %s: ", resourceAsString), e);
callback.complete(new PurgeExpungedResourcesResult(resourceId, resourceType, e.getMessage()));
}
}
protected void purgeMultipleResources() {
try {
long purged = purgeEntities(resourceType == null ? null : List.of(resourceType),
batchSize, startDate, endDate);
callback.complete(new PurgeExpungedResourcesResult(resourceType, batchSize, startDate, endDate, purged));
} catch (CloudRuntimeException e) {
logger.error("Caught exception while expunging resources: ", e);
callback.complete(new PurgeExpungedResourcesResult(resourceType, batchSize, startDate, endDate, e.getMessage()));
}
}
public void reallyRun() {
if (resourceId != null) {
purgeSingleResource();
return;
}
purgeMultipleResources();
}
}
public static class PurgeExpungedResourcesResult extends CommandResult {
ResourceType resourceType;
Long resourceId;
Long batchSize;
Date startDate;
Date endDate;
Long purgedCount;
public PurgeExpungedResourcesResult(final ResourceType resourceType, final Long batchSize,
final Date startDate, final Date endDate, final long purgedCount) {
super();
this.resourceType = resourceType;
this.batchSize = batchSize;
this.startDate = startDate;
this.endDate = endDate;
this.purgedCount = purgedCount;
this.setSuccess(true);
}
public PurgeExpungedResourcesResult(final ResourceType resourceType, final Long batchSize,
final Date startDate, final Date endDate, final String error) {
super();
this.resourceType = resourceType;
this.batchSize = batchSize;
this.startDate = startDate;
this.endDate = endDate;
this.setResult(error);
}
public PurgeExpungedResourcesResult(final Long resourceId, final ResourceType resourceType,
final String error) {
super();
this.resourceId = resourceId;
this.resourceType = resourceType;
if (error != null) {
this.setResult(error);
} else {
this.purgedCount = 1L;
this.setSuccess(true);
}
}
public ResourceType getResourceType() {
return resourceType;
}
public Long getResourceId() {
return resourceId;
}
public Long getBatchSize() {
return batchSize;
}
public Date getStartDate() {
return startDate;
}
public Date getEndDate() {
return endDate;
}
public Long getPurgedCount() {
return purgedCount;
}
}
public static class PurgeExpungedResourcesContext<T> extends AsyncRpcContext<T> {
final AsyncCallFuture<PurgeExpungedResourcesResult> future;
public PurgeExpungedResourcesContext(AsyncCompletionCallback<T> callback,
AsyncCallFuture<PurgeExpungedResourcesResult> future) {
super(callback);
this.future = future;
}
}
}
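A minimal caller-side sketch (assumed wiring, not part of this file): wherever a VM finishes expunging, the orchestrator can hand it to the cleanup service; the call is a no-op unless the VM's service offering carries the ServiceOffering.PURGE_DB_ENTITIES_KEY detail set to true.

void onVmExpunged(ResourceCleanupService resourceCleanupService, VirtualMachine vm) {
// Schedules a delayed purge job for this VM only when its offering enables purging on expunge.
resourceCleanupService.purgeExpungedVmResourcesLaterIfNeeded(vm);
}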

View File

@ -81,6 +81,8 @@
value="#{resourceDiscoverersRegistry.registered}" />
</bean>
<bean id="resourceCleanupServiceImpl" class="org.apache.cloudstack.resource.ResourceCleanupServiceImpl" />
<!-- the new background poll manager -->
<bean id="bgPollManager" class="org.apache.cloudstack.poll.BackgroundPollManagerImpl">
</bean>

View File

@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.ha.dao;
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import org.mockito.stubbing.Answer;
import com.cloud.ha.HaWorkVO;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
@RunWith(MockitoJUnitRunner.class)
public class HighAvailabilityDaoImplTest {
@Spy
HighAvailabilityDaoImpl highAvailabilityDaoImpl;
@Test
public void testExpungeByVmListNoVms() {
Assert.assertEquals(0, highAvailabilityDaoImpl.expungeByVmList(
new ArrayList<>(), 100L));
Assert.assertEquals(0, highAvailabilityDaoImpl.expungeByVmList(
null, 100L));
}
@Test
public void testExpungeByVmList() {
SearchBuilder<HaWorkVO> sb = Mockito.mock(SearchBuilder.class);
SearchCriteria<HaWorkVO> sc = Mockito.mock(SearchCriteria.class);
Mockito.when(sb.create()).thenReturn(sc);
Mockito.doAnswer((Answer<Integer>) invocationOnMock -> {
Long batchSize = (Long)invocationOnMock.getArguments()[1];
return batchSize == null ? 0 : batchSize.intValue();
}).when(highAvailabilityDaoImpl).batchExpunge(Mockito.any(SearchCriteria.class), Mockito.anyLong());
Mockito.when(highAvailabilityDaoImpl.createSearchBuilder()).thenReturn(sb);
final HaWorkVO mockedVO = Mockito.mock(HaWorkVO.class);
Mockito.when(sb.entity()).thenReturn(mockedVO);
List<Long> vmIds = List.of(1L, 2L);
Object[] array = vmIds.toArray();
Long batchSize = 50L;
Assert.assertEquals(batchSize.intValue(), highAvailabilityDaoImpl.expungeByVmList(List.of(1L, 2L), batchSize));
Mockito.verify(sc).setParameters("vmIds", array);
Mockito.verify(highAvailabilityDaoImpl, Mockito.times(1))
.batchExpunge(sc, batchSize);
}
}

View File

@ -332,4 +332,19 @@ public class MockUsageEventDao implements UsageEventDao{
public Pair<List<UsageEventVO>, Integer> searchAndCount(SearchCriteria<UsageEventVO> sc, Filter filter, boolean includeRemoved) {
return null;
}
@Override
public int expunge(SearchCriteria<UsageEventVO> sc, Filter filter) {
return 0;
}
@Override
public int batchExpunge(SearchCriteria<UsageEventVO> sc, Long batchSize) {
return 0;
}
@Override
public int expungeList(List<Long> longs) {
return 0;
}
}

View File

@ -1115,4 +1115,8 @@ public class MockNetworkManagerImpl extends ManagerBase implements NetworkOrches
public List<InternalLoadBalancerElementService> getInternalLoadBalancerElements() {
return null;
}
@Override
public void expungeLbVmRefs(List<Long> vmIds, Long batchSize) {
}
}

View File

@ -0,0 +1,656 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.resource;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.concurrent.TimeUnit;
import org.apache.cloudstack.api.command.admin.resource.PurgeExpungedResourcesCmd;
import org.apache.cloudstack.engine.orchestration.service.NetworkOrchestrationService;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.jobs.dao.VmWorkJobDao;
import org.apache.cloudstack.storage.datastore.db.SnapshotDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.VolumeDataStoreDao;
import org.apache.commons.collections.CollectionUtils;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.Spy;
import org.mockito.junit.MockitoJUnitRunner;
import com.cloud.exception.InvalidParameterValueException;
import com.cloud.ha.HighAvailabilityManager;
import com.cloud.network.as.dao.AutoScaleVmGroupVmMapDao;
import com.cloud.network.dao.IPAddressDao;
import com.cloud.network.dao.InlineLoadBalancerNicMapDao;
import com.cloud.network.dao.LoadBalancerVMMapDao;
import com.cloud.network.dao.OpRouterMonitorServiceDao;
import com.cloud.network.rules.dao.PortForwardingRulesDao;
import com.cloud.offering.ServiceOffering;
import com.cloud.secstorage.CommandExecLogDao;
import com.cloud.service.dao.ServiceOfferingDetailsDao;
import com.cloud.storage.SnapshotVO;
import com.cloud.storage.VolumeVO;
import com.cloud.storage.dao.SnapshotDao;
import com.cloud.storage.dao.SnapshotDetailsDao;
import com.cloud.storage.dao.VolumeDao;
import com.cloud.storage.dao.VolumeDetailsDao;
import com.cloud.utils.Pair;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.vm.ItWorkDao;
import com.cloud.vm.NicVO;
import com.cloud.vm.VMInstanceVO;
import com.cloud.vm.VirtualMachine;
import com.cloud.vm.dao.ConsoleSessionDao;
import com.cloud.vm.dao.NicDao;
import com.cloud.vm.dao.NicDetailsDao;
import com.cloud.vm.dao.NicExtraDhcpOptionDao;
import com.cloud.vm.dao.NicSecondaryIpDao;
import com.cloud.vm.dao.UserVmDetailsDao;
import com.cloud.vm.dao.VMInstanceDao;
import com.cloud.vm.snapshot.VMSnapshotVO;
import com.cloud.vm.snapshot.dao.VMSnapshotDao;
import com.cloud.vm.snapshot.dao.VMSnapshotDetailsDao;
@RunWith(MockitoJUnitRunner.class)
public class ResourceCleanupServiceImplTest {
@Mock
VMInstanceDao vmInstanceDao;
@Mock
VolumeDao volumeDao;
@Mock
VolumeDetailsDao volumeDetailsDao;
@Mock
VolumeDataStoreDao volumeDataStoreDao;
@Mock
SnapshotDao snapshotDao;
@Mock
SnapshotDetailsDao snapshotDetailsDao;
@Mock
SnapshotDataStoreDao snapshotDataStoreDao;
@Mock
NicDao nicDao;
@Mock
NicDetailsDao nicDetailsDao;
@Mock
NicExtraDhcpOptionDao nicExtraDhcpOptionDao;
@Mock
InlineLoadBalancerNicMapDao inlineLoadBalancerNicMapDao;
@Mock
VMSnapshotDao vmSnapshotDao;
@Mock
VMSnapshotDetailsDao vmSnapshotDetailsDao;
@Mock
UserVmDetailsDao userVmDetailsDao;
@Mock
AutoScaleVmGroupVmMapDao autoScaleVmGroupVmMapDao;
@Mock
CommandExecLogDao commandExecLogDao;
@Mock
NetworkOrchestrationService networkOrchestrationService;
@Mock
LoadBalancerVMMapDao loadBalancerVMMapDao;
@Mock
NicSecondaryIpDao nicSecondaryIpDao;
@Mock
HighAvailabilityManager highAvailabilityManager;
@Mock
ItWorkDao itWorkDao;
@Mock
OpRouterMonitorServiceDao opRouterMonitorServiceDao;
@Mock
PortForwardingRulesDao portForwardingRulesDao;
@Mock
IPAddressDao ipAddressDao;
@Mock
VmWorkJobDao vmWorkJobDao;
@Mock
ConsoleSessionDao consoleSessionDao;
@Mock
ServiceOfferingDetailsDao serviceOfferingDetailsDao;
@Spy
@InjectMocks
ResourceCleanupServiceImpl resourceCleanupService = Mockito.spy(new ResourceCleanupServiceImpl());
List<Long> ids = List.of(1L, 2L);
Long batchSize = 100L;
private void overrideConfigValue(final ConfigKey configKey, final Object value) {
try {
Field f = ConfigKey.class.getDeclaredField("_value");
f.setAccessible(true);
f.set(configKey, value);
} catch (IllegalAccessException | NoSuchFieldException e) {
Assert.fail(e.getMessage());
}
}
@Test
public void testPurgeLinkedSnapshotEntitiesNoSnapshots() {
resourceCleanupService.purgeLinkedSnapshotEntities(new ArrayList<>(), batchSize);
Mockito.verify(snapshotDetailsDao, Mockito.never())
.batchExpungeForResources(Mockito.anyList(), Mockito.anyLong());
Mockito.verify(snapshotDataStoreDao, Mockito.never())
.expungeBySnapshotList(Mockito.anyList(), Mockito.anyLong());
}
@Test
public void testPurgeLinkedSnapshotEntities() {
Mockito.when(snapshotDetailsDao.batchExpungeForResources(ids, batchSize)).thenReturn(2L);
Mockito.when(snapshotDataStoreDao.expungeBySnapshotList(ids, batchSize)).thenReturn(2);
resourceCleanupService.purgeLinkedSnapshotEntities(ids, batchSize);
Mockito.verify(snapshotDetailsDao, Mockito.times(1))
.batchExpungeForResources(ids, batchSize);
Mockito.verify(snapshotDataStoreDao, Mockito.times(1))
.expungeBySnapshotList(ids, batchSize);
}
@Test
public void testPurgeVolumeSnapshotsNoVolumes() {
Assert.assertEquals(0, resourceCleanupService.purgeVolumeSnapshots(new ArrayList<>(), 50L));
Mockito.verify(snapshotDao, Mockito.never()).createSearchBuilder();
}
@Test
public void testPurgeVolumeSnapshots() {
SearchBuilder<SnapshotVO> sb = Mockito.mock(SearchBuilder.class);
Mockito.when(sb.entity()).thenReturn(Mockito.mock(SnapshotVO.class));
Mockito.when(sb.create()).thenReturn(Mockito.mock(SearchCriteria.class));
Mockito.when(snapshotDao.createSearchBuilder()).thenReturn(sb);
Assert.assertEquals(0, resourceCleanupService.purgeVolumeSnapshots(new ArrayList<>(), 50L));
Mockito.when(snapshotDao.searchIncludingRemoved(Mockito.any(), Mockito.any(),
Mockito.any(), Mockito.anyBoolean()))
.thenReturn(List.of(Mockito.mock(SnapshotVO.class), Mockito.mock(SnapshotVO.class)));
Mockito.when(snapshotDao.expungeList(Mockito.anyList())).thenReturn(2);
Assert.assertEquals(2, resourceCleanupService.purgeVolumeSnapshots(ids, batchSize));
}
@Test
public void testPurgeLinkedVolumeEntitiesNoVolumes() {
resourceCleanupService.purgeLinkedVolumeEntities(new ArrayList<>(), 50L);
Mockito.verify(volumeDetailsDao, Mockito.never()).batchExpungeForResources(Mockito.anyList(),
Mockito.anyLong());
}
@Test
public void testPurgeLinkedVolumeEntities() {
Mockito.when(volumeDetailsDao.batchExpungeForResources(ids, batchSize)).thenReturn(2L);
Mockito.when(volumeDataStoreDao.expungeByVolumeList(ids, batchSize)).thenReturn(2);
Mockito.doReturn(2L).when(resourceCleanupService).purgeVolumeSnapshots(ids, batchSize);
resourceCleanupService.purgeLinkedVolumeEntities(ids, batchSize);
Mockito.verify(volumeDetailsDao, Mockito.times(1))
.batchExpungeForResources(ids, batchSize);
Mockito.verify(volumeDataStoreDao, Mockito.times(1))
.expungeByVolumeList(ids, batchSize);
Mockito.verify(resourceCleanupService, Mockito.times(1))
.purgeVolumeSnapshots(ids, batchSize);
}
@Test
public void testPurgeVMVolumesNoVms() {
Assert.assertEquals(0, resourceCleanupService.purgeVMVolumes(new ArrayList<>(), 50L));
Mockito.verify(volumeDao, Mockito.never()).searchRemovedByVms(Mockito.anyList(), Mockito.anyLong());
}
@Test
public void testPurgeVMVolumes() {
Mockito.when(volumeDao.searchRemovedByVms(ids, batchSize))
.thenReturn(List.of(Mockito.mock(VolumeVO.class), Mockito.mock(VolumeVO.class)));
Mockito.when(volumeDao.expungeList(Mockito.anyList())).thenReturn(2);
Mockito.doNothing().when(resourceCleanupService).purgeLinkedVolumeEntities(Mockito.anyList(),
Mockito.eq(batchSize));
Assert.assertEquals(2, resourceCleanupService.purgeVMVolumes(ids, batchSize));
}
@Test
public void testPurgeLinkedNicEntitiesNoNics() {
resourceCleanupService.purgeLinkedNicEntities(new ArrayList<>(), batchSize);
Mockito.verify(nicDetailsDao, Mockito.never())
.batchExpungeForResources(ids, batchSize);
Mockito.verify(nicExtraDhcpOptionDao, Mockito.never())
.expungeByNicList(ids, batchSize);
Mockito.verify(inlineLoadBalancerNicMapDao, Mockito.never())
.expungeByNicList(ids, batchSize);
}
@Test
public void testPurgeLinkedNicEntities() {
Mockito.when(nicDetailsDao.batchExpungeForResources(ids, batchSize)).thenReturn(2L);
Mockito.when(nicExtraDhcpOptionDao.expungeByNicList(ids, batchSize)).thenReturn(2);
Mockito.when(inlineLoadBalancerNicMapDao.expungeByNicList(ids, batchSize)).thenReturn(2);
resourceCleanupService.purgeLinkedNicEntities(ids, batchSize);
Mockito.verify(nicDetailsDao, Mockito.times(1))
.batchExpungeForResources(ids, batchSize);
Mockito.verify(nicExtraDhcpOptionDao, Mockito.times(1))
.expungeByNicList(ids, batchSize);
Mockito.verify(inlineLoadBalancerNicMapDao, Mockito.times(1))
.expungeByNicList(ids, batchSize);
}
@Test
public void testPurgeVMNicsNoVms() {
Assert.assertEquals(0, resourceCleanupService.purgeVMNics(new ArrayList<>(), 50L));
Mockito.verify(nicDao, Mockito.never()).searchRemovedByVms(Mockito.anyList(), Mockito.anyLong());
}
@Test
public void testPurgeVMNics() {
Mockito.when(nicDao.searchRemovedByVms(ids, batchSize))
.thenReturn(List.of(Mockito.mock(NicVO.class), Mockito.mock(NicVO.class)));
Mockito.when(nicDao.expungeList(Mockito.anyList())).thenReturn(2);
Mockito.doNothing().when(resourceCleanupService).purgeLinkedNicEntities(Mockito.anyList(),
Mockito.eq(batchSize));
Assert.assertEquals(2, resourceCleanupService.purgeVMNics(ids, batchSize));
}
@Test
public void testPurgeVMSnapshotsNoVms() {
Assert.assertEquals(0, resourceCleanupService.purgeVMSnapshots(new ArrayList<>(), 50L));
Mockito.verify(vmSnapshotDao, Mockito.never()).searchRemovedByVms(Mockito.anyList(), Mockito.anyLong());
}
@Test
public void testPurgeVMSnapshots() {
Mockito.when(vmSnapshotDao.searchRemovedByVms(ids, batchSize))
.thenReturn(List.of(Mockito.mock(VMSnapshotVO.class), Mockito.mock(VMSnapshotVO.class)));
Mockito.when(vmSnapshotDao.expungeList(Mockito.anyList())).thenReturn(2);
Mockito.when(vmSnapshotDetailsDao.batchExpungeForResources(Mockito.anyList(),
Mockito.eq(batchSize))).thenReturn(2L);
Assert.assertEquals(2, resourceCleanupService.purgeVMSnapshots(ids, batchSize));
}
@Test
public void testPurgeLinkedVMEntitiesNoVms() {
resourceCleanupService.purgeLinkedVMEntities(new ArrayList<>(), 50L);
Mockito.verify(resourceCleanupService, Mockito.never()).purgeVMVolumes(Mockito.anyList(),
Mockito.anyLong());
Mockito.verify(userVmDetailsDao, Mockito.never())
.batchExpungeForResources(Mockito.anyList(), Mockito.anyLong());
}
@Test
public void testPurgeLinkedVMEntities() {
Mockito.doReturn(2L).when(resourceCleanupService).purgeVMVolumes(Mockito.anyList(),
Mockito.eq(batchSize));
Mockito.doReturn(2L).when(resourceCleanupService).purgeVMNics(Mockito.anyList(),
Mockito.eq(batchSize));
Mockito.when(userVmDetailsDao.batchExpungeForResources(Mockito.anyList(), Mockito.anyLong())).thenReturn(2L);
Mockito.doReturn(2L).when(resourceCleanupService).purgeVMSnapshots(Mockito.anyList(),
Mockito.eq(batchSize));
Mockito.when(autoScaleVmGroupVmMapDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(commandExecLogDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(loadBalancerVMMapDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(nicSecondaryIpDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(highAvailabilityManager.expungeWorkItemsByVmList(Mockito.anyList(), Mockito.anyLong()))
.thenReturn(2);
Mockito.when(itWorkDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(opRouterMonitorServiceDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(portForwardingRulesDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(ipAddressDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(vmWorkJobDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
Mockito.when(consoleSessionDao.expungeByVmList(Mockito.anyList(), Mockito.anyLong())).thenReturn(2);
resourceCleanupService.purgeLinkedVMEntities(ids, batchSize);
Mockito.verify(resourceCleanupService, Mockito.times(1)).purgeVMVolumes(ids, batchSize);
Mockito.verify(resourceCleanupService, Mockito.times(1)).purgeVMNics(ids, batchSize);
Mockito.verify(userVmDetailsDao, Mockito.times(1))
.batchExpungeForResources(ids, batchSize);
Mockito.verify(resourceCleanupService, Mockito.times(1))
.purgeVMSnapshots(ids, batchSize);
Mockito.verify(autoScaleVmGroupVmMapDao, Mockito.times(1))
.expungeByVmList(ids, batchSize);
Mockito.verify(commandExecLogDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(loadBalancerVMMapDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(nicSecondaryIpDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(highAvailabilityManager, Mockito.times(1)).
expungeWorkItemsByVmList(ids, batchSize);
Mockito.verify(itWorkDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(opRouterMonitorServiceDao, Mockito.times(1))
.expungeByVmList(ids, batchSize);
Mockito.verify(portForwardingRulesDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(ipAddressDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(vmWorkJobDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
Mockito.verify(consoleSessionDao, Mockito.times(1)).expungeByVmList(ids, batchSize);
}
@Test
public void testGetVmIdsWithActiveVolumeSnapshotsNoVms() {
Assert.assertTrue(CollectionUtils.isEmpty(
resourceCleanupService.getVmIdsWithActiveVolumeSnapshots(new ArrayList<>())));
}
@Test
public void testGetVmIdsWithActiveVolumeSnapshots() {
VolumeVO vol1 = Mockito.mock(VolumeVO.class);
Mockito.when(vol1.getId()).thenReturn(1L);
Mockito.when(vol1.getInstanceId()).thenReturn(1L);
VolumeVO vol2 = Mockito.mock(VolumeVO.class);
Mockito.when(vol2.getId()).thenReturn(2L);
Mockito.when(volumeDao.searchRemovedByVms(ids, null)).thenReturn(List.of(vol1, vol2));
SnapshotVO snapshotVO = Mockito.mock(SnapshotVO.class);
Mockito.when(snapshotVO.getVolumeId()).thenReturn(1L);
Mockito.when(snapshotDao.searchByVolumes(Mockito.anyList())).thenReturn(List.of(snapshotVO));
HashSet<Long> vmIds = resourceCleanupService.getVmIdsWithActiveVolumeSnapshots(ids);
Assert.assertTrue(CollectionUtils.isNotEmpty(vmIds));
Assert.assertEquals(1, vmIds.size());
Assert.assertEquals(1L, vmIds.toArray()[0]);
}
@Test
public void testGetFilteredVmIdsForSnapshots() {
Long skippedVmIds = ids.get(0);
Long notSkippedVmIds = ids.get(1);
VMSnapshotVO vmSnapshotVO = Mockito.mock(VMSnapshotVO.class);
Mockito.when(vmSnapshotVO.getVmId()).thenReturn(1L);
Mockito.when(vmSnapshotDao.searchByVms(Mockito.anyList())).thenReturn(List.of(vmSnapshotVO));
HashSet<Long> set = new HashSet<>();
set.add(1L);
Mockito.doReturn(set).when(resourceCleanupService).getVmIdsWithActiveVolumeSnapshots(ids);
Pair<List<Long>, List<Long>> result = resourceCleanupService.getFilteredVmIdsForSnapshots(new ArrayList<>(ids));
Assert.assertEquals(1, result.first().size());
Assert.assertEquals(1, result.second().size());
Assert.assertEquals(notSkippedVmIds, result.first().get(0));
Assert.assertEquals(skippedVmIds, result.second().get(0));
}
@Test
public void testGetVmIdsWithNoActiveSnapshots() {
VMInstanceVO vm1 = Mockito.mock(VMInstanceVO.class);
Mockito.when(vm1.getId()).thenReturn(ids.get(0));
VMInstanceVO vm2 = Mockito.mock(VMInstanceVO.class);
Mockito.when(vm2.getId()).thenReturn(ids.get(1));
Mockito.when(vmInstanceDao.searchRemovedByRemoveDate(Mockito.any(), Mockito.any(),
Mockito.anyLong(), Mockito.anyList())).thenReturn(List.of(vm1, vm2));
Long skippedVmIds = ids.get(0);
Long notSkippedVmIds = ids.get(1);
VMSnapshotVO vmSnapshotVO = Mockito.mock(VMSnapshotVO.class);
Mockito.when(vmSnapshotVO.getVmId()).thenReturn(1L);
Mockito.when(vmSnapshotDao.searchByVms(Mockito.anyList())).thenReturn(List.of(vmSnapshotVO));
HashSet<Long> set = new HashSet<>();
set.add(1L);
Mockito.doReturn(set).when(resourceCleanupService).getVmIdsWithActiveVolumeSnapshots(Mockito.anyList());
Pair<List<Long>, List<Long>> result =
resourceCleanupService.getVmIdsWithNoActiveSnapshots(new Date(), new Date(), batchSize,
new ArrayList<>());
Assert.assertEquals(1, result.first().size());
Assert.assertEquals(1, result.second().size());
Assert.assertEquals(notSkippedVmIds, result.first().get(0));
Assert.assertEquals(skippedVmIds, result.second().get(0));
}
@Test
public void testPurgeVMEntitiesNoVms() {
Mockito.when(vmInstanceDao.searchRemovedByRemoveDate(Mockito.any(), Mockito.any(),
Mockito.anyLong(), Mockito.anyList())).thenReturn(new ArrayList<>());
Assert.assertEquals(0, resourceCleanupService.purgeVMEntities(batchSize, new Date(), new Date()));
}
@Test
public void testPurgeVMEntities() {
Mockito.doReturn(new Pair<>(ids, new ArrayList<>())).when(resourceCleanupService)
.getVmIdsWithNoActiveSnapshots(Mockito.any(), Mockito.any(), Mockito.anyLong(), Mockito.anyList());
Mockito.when(vmInstanceDao.expungeList(ids)).thenReturn(ids.size());
Assert.assertEquals(ids.size(), resourceCleanupService.purgeVMEntities(batchSize, new Date(), new Date()));
}
@Test
public void testExpungeVMEntityFiltered() {
Mockito.doReturn(new Pair<>(new ArrayList<>(), List.of(ids.get(0)))).when(resourceCleanupService)
.getFilteredVmIdsForSnapshots(Mockito.anyList());
Assert.assertFalse(resourceCleanupService.purgeVMEntity(ids.get(0)));
}
@Test
public void testPurgeVMEntityFiltered() {
Mockito.doReturn(new Pair<>(List.of(ids.get(0)), new ArrayList<>())).when(resourceCleanupService)
.getFilteredVmIdsForSnapshots(Mockito.anyList());
Mockito.doNothing().when(resourceCleanupService)
.purgeLinkedVMEntities(Mockito.anyList(), Mockito.anyLong());
Mockito.when(vmInstanceDao.expunge(ids.get(0))).thenReturn(true);
Assert.assertTrue(resourceCleanupService.purgeVMEntity(ids.get(0)));
}
@Test
public void testPurgeVMEntity() {
Mockito.doReturn(new Pair<>(List.of(ids.get(0)), new ArrayList<>())).when(resourceCleanupService)
.getFilteredVmIdsForSnapshots(Mockito.anyList());
Mockito.doNothing().when(resourceCleanupService)
.purgeLinkedVMEntities(Mockito.anyList(), Mockito.anyLong());
Mockito.when(vmInstanceDao.expunge(ids.get(0))).thenReturn(true);
Assert.assertTrue(resourceCleanupService.purgeVMEntity(ids.get(0)));
}
@Test
public void testPurgeEntities() {
Mockito.doReturn((long)ids.size()).when(resourceCleanupService)
.purgeVMEntities(Mockito.anyLong(), Mockito.any(), Mockito.any());
long result = resourceCleanupService.purgeEntities(
List.of(ResourceCleanupService.ResourceType.VirtualMachine), batchSize, new Date(), new Date());
Assert.assertEquals(ids.size(), result);
}
@Test(expected = InvalidParameterValueException.class)
public void testGetResourceTypeAndValidatePurgeExpungedResourcesCmdParamsInvalidResourceType() {
resourceCleanupService.getResourceTypeAndValidatePurgeExpungedResourcesCmdParams("Volume",
new Date(), new Date(), batchSize);
}
@Test(expected = InvalidParameterValueException.class)
public void testGetResourceTypeAndValidatePurgeExpungedResourcesCmdParamsInvalidBatchSize() {
resourceCleanupService.getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(
ResourceCleanupService.ResourceType.VirtualMachine.toString(),
new Date(), new Date(), -1L);
}
@Test(expected = InvalidParameterValueException.class)
public void testGetResourceTypeAndValidatePurgeExpungedResourcesCmdParamsInvalidDates() {
Calendar cal = Calendar.getInstance();
Date startDate = new Date();
cal.setTime(startDate);
cal.add(Calendar.DATE, -1);
Date endDate = cal.getTime();
resourceCleanupService.getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(
ResourceCleanupService.ResourceType.VirtualMachine.toString(),
startDate, endDate, 100L);
}
@Test
public void testGetResourceTypeAndValidatePurgeExpungedResourcesCmdParams() {
Calendar cal = Calendar.getInstance();
Date endDate = new Date();
cal.setTime(endDate);
cal.add(Calendar.DATE, -1);
Date startDate = cal.getTime();
ResourceCleanupService.ResourceType type =
resourceCleanupService.getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(
ResourceCleanupService.ResourceType.VirtualMachine.toString(),
startDate, endDate, 100L);
Assert.assertEquals(ResourceCleanupService.ResourceType.VirtualMachine, type);
}
@Test
public void testGetResourceTypeAndValidatePurgeExpungedResourcesCmdParamsNoValues() {
ResourceCleanupService.ResourceType type =
resourceCleanupService.getResourceTypeAndValidatePurgeExpungedResourcesCmdParams(
null, null, null, null);
Assert.assertNull(type);
}
@Test
public void testIsVmOfferingPurgeResourcesEnabled() {
Mockito.when(serviceOfferingDetailsDao.getDetail(1L,
ServiceOffering.PURGE_DB_ENTITIES_KEY)).thenReturn(null);
Assert.assertFalse(resourceCleanupService.isVmOfferingPurgeResourcesEnabled(1L));
Mockito.when(serviceOfferingDetailsDao.getDetail(2L,
ServiceOffering.PURGE_DB_ENTITIES_KEY)).thenReturn("false");
Assert.assertFalse(resourceCleanupService.isVmOfferingPurgeResourcesEnabled(2L));
Mockito.when(serviceOfferingDetailsDao.getDetail(3L,
ServiceOffering.PURGE_DB_ENTITIES_KEY)).thenReturn("true");
Assert.assertTrue(resourceCleanupService.isVmOfferingPurgeResourcesEnabled(3L));
}
@Test
public void testPurgeExpungedResource() {
Assert.assertFalse(resourceCleanupService.purgeExpungedResource(1L, null));
Mockito.doReturn(true).when(resourceCleanupService)
.purgeExpungedResource(Mockito.anyLong(), Mockito.any());
Assert.assertTrue(resourceCleanupService.purgeExpungedResource(1L,
ResourceCleanupService.ResourceType.VirtualMachine));
}
@Test(expected = InvalidParameterValueException.class)
public void testPurgeExpungedResourcesInvalidResourceType() {
PurgeExpungedResourcesCmd cmd = Mockito.mock(PurgeExpungedResourcesCmd.class);
Mockito.when(cmd.getResourceType()).thenReturn("Volume");
resourceCleanupService.purgeExpungedResources(cmd);
}
@Test(expected = InvalidParameterValueException.class)
public void testPurgeExpungedResourcesInvalidBatchSize() {
PurgeExpungedResourcesCmd cmd = Mockito.mock(PurgeExpungedResourcesCmd.class);
Mockito.when(cmd.getBatchSize()).thenReturn(-1L);
resourceCleanupService.purgeExpungedResources(cmd);
}
@Test(expected = InvalidParameterValueException.class)
public void testPurgeExpungedResourcesInvalidDates() {
Calendar cal = Calendar.getInstance();
Date startDate = new Date();
cal.setTime(startDate);
cal.add(Calendar.DATE, -1);
Date endDate = cal.getTime();
PurgeExpungedResourcesCmd cmd = Mockito.mock(PurgeExpungedResourcesCmd.class);
Mockito.when(cmd.getStartDate()).thenReturn(startDate);
Mockito.when(cmd.getEndDate()).thenReturn(endDate);
resourceCleanupService.purgeExpungedResources(cmd);
}
@Test
public void testPurgeExpungedResources() {
Mockito.doReturn((long)ids.size()).when(resourceCleanupService).purgeExpungedResourceUsingJob(
ResourceCleanupService.ResourceType.VirtualMachine, batchSize, null, null);
PurgeExpungedResourcesCmd cmd = Mockito.mock(PurgeExpungedResourcesCmd.class);
Mockito.when(cmd.getResourceType()).thenReturn(ResourceCleanupService.ResourceType.VirtualMachine.toString());
Mockito.when(cmd.getBatchSize()).thenReturn(batchSize);
long result = resourceCleanupService.purgeExpungedResources(cmd);
Assert.assertEquals(ids.size(), result);
}
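// purgeExpungedVmResourcesLaterIfNeeded should schedule a delayed purge only when the
// VM's service offering has resource purging enabled.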
@Test
public void testExpungedVmResourcesLaterIfNeededFalse() {
VirtualMachine vm = Mockito.mock(VirtualMachine.class);
Mockito.when(vm.getServiceOfferingId()).thenReturn(1L);
Mockito.doReturn(false).when(resourceCleanupService).isVmOfferingPurgeResourcesEnabled(1L);
resourceCleanupService.purgeExpungedVmResourcesLaterIfNeeded(vm);
Mockito.verify(resourceCleanupService, Mockito.never()).purgeExpungedResourceLater(Mockito.anyLong(), Mockito.any());
}
@Test
public void testExpungedVmResourcesLaterIfNeeded() {
VirtualMachine vm = Mockito.mock(VirtualMachine.class);
Mockito.when(vm.getServiceOfferingId()).thenReturn(1L);
Mockito.doReturn(true).when(resourceCleanupService).isVmOfferingPurgeResourcesEnabled(1L);
Mockito.doNothing().when(resourceCleanupService).purgeExpungedResourceLater(Mockito.anyLong(), Mockito.any());
resourceCleanupService.purgeExpungedVmResourcesLaterIfNeeded(vm);
Mockito.verify(resourceCleanupService, Mockito.times(1))
.purgeExpungedResourceLater(Mockito.anyLong(), Mockito.any());
}
@Test
public void testGetBatchSizeFromConfig() {
int value = 50;
overrideConfigValue(ResourceCleanupService.ExpungedResourcesPurgeBatchSize, String.valueOf(value));
Assert.assertEquals(value, resourceCleanupService.getBatchSizeFromConfig());
}
@Test
public void testGetResourceTypesFromConfigEmpty() {
overrideConfigValue(ResourceCleanupService.ExpungedResourcePurgeResources, "");
Assert.assertNull(resourceCleanupService.getResourceTypesFromConfig());
}
@Test
public void testGetResourceTypesFromConfig() {
overrideConfigValue(ResourceCleanupService.ExpungedResourcePurgeResources, "VirtualMachine");
List<ResourceCleanupService.ResourceType> types = resourceCleanupService.getResourceTypesFromConfig();
Assert.assertEquals(1, types.size());
}
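// calculatePastDateFromConfig: null or zero means no cutoff (null is returned), a negative
// value is rejected, and a positive value yields a date that many days in the past.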
@Test
public void testCalculatePastDateFromConfigNull() {
Assert.assertNull(resourceCleanupService.calculatePastDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeKeepPastDays.key(),
null));
Assert.assertNull(resourceCleanupService.calculatePastDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeKeepPastDays.key(),
0));
}
@Test(expected = CloudRuntimeException.class)
public void testCalculatePastDateFromConfigFail() {
Assert.assertNull(resourceCleanupService.calculatePastDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeKeepPastDays.key(),
-1));
}
@Test
public void testCalculatePastDateFromConfig() {
int days = 10;
Date result = resourceCleanupService.calculatePastDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeKeepPastDays.key(),
days);
Date today = new Date();
long diff = today.getTime() - result.getTime();
Assert.assertEquals(days, TimeUnit.DAYS.convert(diff, TimeUnit.MILLISECONDS));
}
@Test
public void testParseDateFromConfig() {
Assert.assertNull(resourceCleanupService.parseDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeStartTime.key(), ""));
Date date = resourceCleanupService.parseDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeStartTime.key(), "2020-01-01");
Assert.assertNotNull(date);
Calendar calendar = Calendar.getInstance();
calendar.setTime(date);
Assert.assertEquals(2020, calendar.get(Calendar.YEAR));
Assert.assertEquals(0, calendar.get(Calendar.MONTH));
Assert.assertEquals(1, calendar.get(Calendar.DATE));
}
@Test(expected = CloudRuntimeException.class)
public void testParseDateFromConfigFail() {
resourceCleanupService.parseDateFromConfig(
ResourceCleanupService.ExpungedResourcesPurgeStartTime.key(), "ABC");
}
}

View File

@ -0,0 +1,364 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" BVT tests for purging expunged VMs and their resources
"""
# Import Local Modules
from marvin.codes import FAILED
from marvin.cloudstackAPI import (purgeExpungedResources,
listInfrastructure,
listManagementServers)
from marvin.cloudstackException import CloudstackAPIException
from marvin.cloudstackTestCase import cloudstackTestCase
from marvin.lib.base import (Account,
Domain,
ServiceOffering,
DiskOffering,
NetworkOffering,
Network,
VirtualMachine,
Configurations)
from marvin.lib.common import (get_domain,
get_zone,
get_template)
from marvin.lib.utils import (random_gen)
from marvin.lib.decoratorGenerators import skipTestIf
from marvin.sshClient import SshClient
from nose.plugins.attrib import attr
import logging
# Import System modules
import time
from datetime import datetime, timedelta
import pytz
import threading
_multiprocess_shared_ = True
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
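# Overall approach: create VMs, expunge them, then backdate their created/removed columns
# in the DB so the purge API's start/end date filters can be exercised without waiting for
# real days to pass.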
class TestPurgeExpungedVms(cloudstackTestCase):
@classmethod
def setUpClass(cls):
cls.testClient = super(TestPurgeExpungedVms, cls).getClsTestClient()
cls.apiclient = cls.testClient.getApiClient()
cls.services = cls.testClient.getParsedTestDataConfig()
cls.dbConnection = cls.testClient.getDbConnection()
# Get Zone, Domain and templates
cls.domain = get_domain(cls.apiclient)
cls.zone = get_zone(cls.apiclient, cls.testClient.getZoneForTests())
cls.services['mode'] = cls.zone.networktype
cls.mgtSvrDetails = cls.config.__dict__["mgtSvr"][0].__dict__
cls.hypervisor = cls.testClient.getHypervisorInfo().lower()
cls.hypervisorIsSimulator = False
if cls.hypervisor == 'simulator':
cls.hypervisorIsSimulator = True
cls._cleanup = []
cls.logger = logging.getLogger('TestPurgeExpungedVms')
cls.logger.setLevel(logging.DEBUG)
template = get_template(
cls.apiclient,
cls.zone.id,
cls.services["ostype"])
if template == FAILED:
assert False, "get_template() failed to return template with description %s" % cls.services["ostype"]
# Set Zones and disk offerings
cls.services["small"]["zoneid"] = cls.zone.id
cls.services["small"]["template"] = template.id
cls.compute_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offerings"]["tiny"])
cls._cleanup.append(cls.compute_offering)
cls.purge_resource_compute_offering = ServiceOffering.create(
cls.apiclient,
cls.services["service_offerings"]["tiny"],
purgeresources=True)
cls._cleanup.append(cls.purge_resource_compute_offering)
cls.disk_offering = DiskOffering.create(
cls.apiclient,
cls.services["disk_offering"]
)
cls._cleanup.append(cls.disk_offering)
cls.network_offering = NetworkOffering.create(
cls.apiclient,
cls.services["l2-network_offering"],
)
cls._cleanup.append(cls.network_offering)
cls.network_offering.update(cls.apiclient, state='Enabled')
cls.services["network"]["networkoffering"] = cls.network_offering.id
cls.domain1 = Domain.create(
cls.apiclient,
cls.services["domain"])
cls._cleanup.append(cls.domain1)
cls.account = Account.create(
cls.apiclient,
cls.services["account"],
domainid=cls.domain1.id)
cls._cleanup.append(cls.account)
cls.userapiclient = cls.testClient.getUserApiClient(
UserName=cls.account.name,
DomainName=cls.account.domain
)
cls.l2_network = Network.create(
cls.userapiclient,
cls.services["l2-network"],
zoneid=cls.zone.id,
networkofferingid=cls.network_offering.id
)
cls.services["virtual_machine"]["zoneid"] = cls.zone.id
cls.services["virtual_machine"]["template"] = template.id
@classmethod
def tearDownClass(cls):
super(TestPurgeExpungedVms, cls).tearDownClass()
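# Backdate the created/removed timestamps of an already expunged VM directly in the
# vm_instance table so that it falls inside a chosen purge window.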
def updateVmCreatedRemovedInDb(self, vm_id, timestamp):
# Convert the local timestamp to UTC, assuming the DB stores timestamps in UTC
utc_timestamp = datetime.strptime(timestamp, DATETIME_FORMAT).astimezone(pytz.utc).strftime(DATETIME_FORMAT)
logging.info("Updating VM: %s created and removed in DB with timestamp: %s" % (vm_id, timestamp))
query = "UPDATE cloud.vm_instance SET created='%s', removed='%s' WHERE uuid='%s'" % (utc_timestamp, utc_timestamp, vm_id)
self.dbConnection.execute(query)
def setupExpungedVm(self, timestamp):
logging.info("Setting up expunged VM with timestamp: %s" % timestamp)
vm = VirtualMachine.create(
self.userapiclient,
self.services["virtual_machine"],
serviceofferingid=self.compute_offering.id,
networkids=self.l2_network.id
)
self.cleanup.append(vm)
vm_id = vm.id
self.vm_ids[timestamp] = vm_id
vm.delete(self.apiclient, expunge=True)
self.cleanup.remove(vm)
self.updateVmCreatedRemovedInDb(vm_id, timestamp)
def setupExpungedVms(self):
logging.info("Setup VMs")
self.vm_ids = {}
self.threads = []
days = 3
for i in range(days):
logging.info("Setting up expunged VMs for day: %d" % (i + 1))
thread = threading.Thread(target=self.setupExpungedVm, args=(self.timestamps[i],))
self.threads.append(thread)
thread.start()
for index, thread in enumerate(self.threads):
logging.info("Before joining thread %d." % index)
thread.join()
logging.info("Thread %d done" % index)
def setUp(self):
self.cleanup = []
self.changedConfigurations = {}
self.staticConfigurations = []
if 'service_offering' in self._testMethodName:
return
if 'background_task' in self._testMethodName and self.hypervisorIsSimulator:
return
self.days = 3
self.timestamps = []
for i in range(self.days):
days_ago = (self.days - i) * 2
past_time = datetime.now() - timedelta(days=days_ago)
timestamp = past_time.strftime(DATETIME_FORMAT)
self.timestamps.append(timestamp)
self.setupExpungedVms()
def tearDown(self):
restartServer = False
for config in self.changedConfigurations:
value = self.changedConfigurations[config]
logging.info("Reverting value of config: %s to %s" % (config, value))
Configurations.update(self.apiclient,
config,
value=value)
if config in self.staticConfigurations:
restartServer = True
if restartServer:
self.restartAllManagementServers()
super(TestPurgeExpungedVms, self).tearDown()
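# Thin wrapper over the admin-only purgeExpungedResources API; only the optional
# startdate/enddate parameters are exercised here, the command also accepts
# resourcetype and batchsize.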
def executePurgeExpungedResources(self, start_date, end_date):
cmd = purgeExpungedResources.purgeExpungedResourcesCmd()
if start_date is not None:
cmd.startdate = start_date
if end_date is not None:
cmd.enddate = end_date
self.apiclient.purgeExpungedResources(cmd)
def getVmsInDb(self, vm_ids):
vm_id_str = "','".join(vm_ids)
vm_id_str = "'" + vm_id_str + "'"
query = "SELECT * FROM cloud.vm_instance WHERE uuid IN (%s)" % vm_id_str
response = self.dbConnection.execute(query)
logging.info("DB response from VM: %s:: %s" % (vm_id_str, response))
return response
def validatePurgedVmEntriesInDb(self, purged, not_purged):
if purged is not None:
response = self.getVmsInDb(purged)
self.assertTrue(response is None or len(response) == 0,
"Purged VMs still present in DB")
if not_purged is not None:
response = self.getVmsInDb(not_purged)
self.assertTrue(response is not None and len(response) == len(not_purged),
"VMs that should not have been purged are missing from the DB")
def changeConfiguration(self, name, value):
current_config = Configurations.list(self.apiclient, name=name)[0]
if current_config.value == value:
return
logging.info("Current value for config: %s is %s, changing it to %s" % (name, current_config.value, value))
self.changedConfigurations[name] = current_config.value
if current_config.isdynamic == False:
self.staticConfigurations.append(name)
Configurations.update(self.apiclient,
name,
value=value)
def isManagementUp(self):
try:
self.apiclient.listInfrastructure(listInfrastructure.listInfrastructureCmd())
return True
except Exception:
return False
def getManagementServerIps(self):
if self.mgtSvrDetails["mgtSvrIp"] == 'localhost':
return None
cmd = listManagementServers.listManagementServersCmd()
servers = self.apiclient.listManagementServers(cmd)
active_server_ips = []
active_server_ips.append(self.mgtSvrDetails["mgtSvrIp"])
for idx, server in enumerate(servers):
if server.state == 'Up' and server.serviceip != self.mgtSvrDetails["mgtSvrIp"]:
active_server_ips.append(server.serviceip)
return active_server_ips
def restartAllManagementServers(self):
"""Restart all management servers
Assumes all servers share the same username and password"""
server_ips = self.getManagementServerIps()
if server_ips is None:
self.staticConfigurations.clear()
self.fail("MS restarts cannot be done on %s" % self.mgtSvrDetails["mgtSvrIp"])
return False
self.debug("Restarting all management server")
for idx, server_ip in enumerate(server_ips):
sshClient = SshClient(
server_ip,
22,
self.mgtSvrDetails["user"],
self.mgtSvrDetails["passwd"]
)
command = "service cloudstack-management stop"
sshClient.execute(command)
command = "service cloudstack-management start"
sshClient.execute(command)
# Wait up to 10 minutes for the management server to come back up before continuing
timeout = time.time() + (10 * 60)
while time.time() < timeout:
if self.isManagementUp() is True: return True
time.sleep(5)
self.debug("Management server did not come up, failing")
return False
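# Tests 01-04 call purgeExpungedResources with different start/end date combinations
# against three VMs whose removal timestamps were backdated to three different days.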
@attr(tags=["advanced"], required_hardware="true")
def test_01_purge_expunged_api_vm_start_date(self):
self.executePurgeExpungedResources(self.timestamps[1], None)
self.validatePurgedVmEntriesInDb(
[self.vm_ids[self.timestamps[1]], self.vm_ids[self.timestamps[2]]],
[self.vm_ids[self.timestamps[0]]]
)
@attr(tags=["advanced"], required_hardware="true")
def test_02_purge_expunged_api_vm_end_date(self):
self.executePurgeExpungedResources(None, self.timestamps[1])
self.validatePurgedVmEntriesInDb(
[self.vm_ids[self.timestamps[0]], self.vm_ids[self.timestamps[1]]],
[self.vm_ids[self.timestamps[2]]]
)
@attr(tags=["advanced"], required_hardware="true")
def test_03_purge_expunged_api_vm_start_end_date(self):
self.executePurgeExpungedResources(self.timestamps[0], self.timestamps[2])
self.validatePurgedVmEntriesInDb(
[self.vm_ids[self.timestamps[0]], self.vm_ids[self.timestamps[1]], self.vm_ids[self.timestamps[2]]],
None
)
@attr(tags=["advanced"], required_hardware="true")
def test_04_purge_expunged_api_vm_no_date(self):
self.executePurgeExpungedResources(None, None)
self.validatePurgedVmEntriesInDb(
[self.vm_ids[self.timestamps[0]], self.vm_ids[self.timestamps[1]], self.vm_ids[self.timestamps[2]]],
None
)
@attr(tags=["advanced", "skip_setup_vms"], required_hardware="true")
def test_05_purge_expunged_vm_service_offering(self):
purge_delay = 181
self.changeConfiguration('expunged.resource.purge.job.delay', purge_delay)
vm = VirtualMachine.create(
self.userapiclient,
self.services["virtual_machine"],
serviceofferingid=self.purge_resource_compute_offering.id,
networkids=self.l2_network.id
)
self.cleanup.append(vm)
vm_id = vm.id
vm.delete(self.apiclient, expunge=True)
self.cleanup.remove(vm)
wait = 1.25 * purge_delay
logging.info("Waiting for 1.25x%d = %d seconds for VM to get purged" % (purge_delay, wait))
time.sleep(wait)
self.validatePurgedVmEntriesInDb(
[vm_id],
None
)
@skipTestIf("hypervisorIsSimulator")
@attr(tags=["advanced"], required_hardware="true")
def test_06_purge_expunged_vm_background_task(self):
purge_task_delay = 60
self.changeConfiguration('expunged.resources.purge.enabled', 'true')
self.changeConfiguration('expunged.resources.purge.delay', purge_task_delay)
self.changeConfiguration('expunged.resources.purge.keep.past.days', 1)
if len(self.staticConfigurations) > 0:
self.restartAllManagementServers()
wait = 2 * purge_task_delay
logging.info("Waiting for 2x%d = %d seconds for background task to execute" % (purge_task_delay, wait))
time.sleep(wait)
self.validatePurgedVmEntriesInDb(
[self.vm_ids[self.timestamps[0]], self.vm_ids[self.timestamps[1]], self.vm_ids[self.timestamps[2]]],
None
)

View File

@ -276,7 +276,8 @@ known_categories = {
'listVmsForImport': 'Virtual Machine',
'importVm': 'Virtual Machine',
'Webhook': 'Webhook',
'Webhooks': 'Webhook'
'Webhooks': 'Webhook',
'purgeExpungedResources': 'Resource'
}

View File

@ -1687,6 +1687,7 @@
"label.publickey": "Public key",
"label.publicnetwork": "Public Network",
"label.publicport": "Public port",
"label.purgeresources": "Purge Resources",
"label.purpose": "Purpose",
"label.qostype": "QoS type",
"label.quickview": "Quick view",

View File

@ -40,7 +40,7 @@ export default {
filters: ['active', 'inactive'],
columns: ['name', 'displaytext', 'state', 'cpunumber', 'cpuspeed', 'memory', 'domain', 'zone', 'order'],
details: () => {
var fields = ['name', 'id', 'displaytext', 'offerha', 'provisioningtype', 'storagetype', 'iscustomized', 'iscustomizediops', 'limitcpuuse', 'cpunumber', 'cpuspeed', 'memory', 'hosttags', 'tags', 'storagetags', 'domain', 'zone', 'created', 'dynamicscalingenabled', 'diskofferingstrictness', 'encryptroot']
var fields = ['name', 'id', 'displaytext', 'offerha', 'provisioningtype', 'storagetype', 'iscustomized', 'iscustomizediops', 'limitcpuuse', 'cpunumber', 'cpuspeed', 'memory', 'hosttags', 'tags', 'storagetags', 'domain', 'zone', 'created', 'dynamicscalingenabled', 'diskofferingstrictness', 'encryptroot', 'purgeresources']
if (store.getters.apis.createServiceOffering &&
store.getters.apis.createServiceOffering.params.filter(x => x.name === 'storagepolicy').length > 0) {
fields.splice(6, 0, 'vspherestoragepolicy')

View File

@ -343,6 +343,12 @@
</a-select-option>
</a-select>
</a-form-item>
<a-form-item name="purgeresources" ref="purgeresources">
<template #label>
<tooltip-label :title="$t('label.purgeresources')" :tooltip="apiParams.purgeresources.description"/>
</template>
<a-switch v-model:checked="form.purgeresources"/>
</a-form-item>
<a-form-item name="computeonly" ref="computeonly">
<template #label>
<tooltip-label :title="$t('label.computeonly.offering')" :tooltip="$t('label.computeonly.offering.tooltip')"/>
@ -958,7 +964,8 @@ export default {
limitcpuuse: values.limitcpuuse === true,
dynamicscalingenabled: values.dynamicscalingenabled,
diskofferingstrictness: values.diskofferingstrictness,
encryptroot: values.encryptdisk
encryptroot: values.encryptdisk,
purgeresources: values.purgeresources
}
if (values.diskofferingid) {
params.diskofferingid = values.diskofferingid