Mirror of https://github.com/apache/cloudstack.git, synced 2025-10-26 08:42:29 +01:00
Fixup main build errors (#9330)
* Fixup main build errors
* Fixup flaky test
* Address comments
parent de683a5163
commit 7c32bd2506
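Taken together, the hunks below finish the log4j 1.x to Log4j 2 migration that the main-branch build was tripping over: static `s_logger`/`LOGGER` fields built with `Logger.getLogger(SomeClass.class)` are replaced by, or deleted in favor of, an instance field `logger` obtained from `LogManager.getLogger(getClass())`, usually inherited from a base class. A minimal sketch of the pattern, with illustrative class names rather than ones from the commit:

// Before (log4j 1.x): every class declares its own static logger.
//     import org.apache.log4j.Logger;
//     private static final Logger s_logger = Logger.getLogger(StorageLifeCycleImpl.class);

// After (Log4j 2): a base class declares one instance logger that
// subclasses inherit; getClass() resolves to the runtime subclass, so
// log lines are still attributed to the concrete class.
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class BaseLifeCycle {
    protected Logger logger = LogManager.getLogger(getClass());
}

class StorageLifeCycleImpl extends BaseLifeCycle {
    void changeScope() {
        logger.debug("Changing scope of the storage pool to Zone");
    }
}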
BasePrimaryDataStoreLifeCycleImpl.java

@@ -25,7 +25,6 @@ import javax.inject.Inject;
 import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
 import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.log4j.Logger;
 
 import com.cloud.agent.AgentManager;
 import com.cloud.agent.api.Answer;
@@ -39,9 +38,12 @@ import com.cloud.storage.StoragePool;
 import com.cloud.storage.StoragePoolHostVO;
 import com.cloud.storage.dao.StoragePoolHostDao;
 import com.cloud.utils.Pair;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 
 public class BasePrimaryDataStoreLifeCycleImpl {
-    private static final Logger s_logger = Logger.getLogger(BasePrimaryDataStoreLifeCycleImpl.class);
+    protected Logger logger = LogManager.getLogger(getClass());
 
     @Inject
     AgentManager agentMgr;
     @Inject
@@ -70,13 +72,13 @@ public class BasePrimaryDataStoreLifeCycleImpl {
 
     public void changeStoragePoolScopeToZone(DataStore store, ClusterScope clusterScope, HypervisorType hypervisorType) {
         List<HostVO> hosts = getPoolHostsList(clusterScope, hypervisorType);
-        s_logger.debug("Changing scope of the storage pool to Zone");
+        logger.debug("Changing scope of the storage pool to Zone");
         if (hosts != null) {
             for (HostVO host : hosts) {
                 try {
                     storageMgr.connectHostToSharedPool(host.getId(), store.getId());
                 } catch (Exception e) {
-                    s_logger.warn("Unable to establish a connection between " + host + " and " + store, e);
+                    logger.warn("Unable to establish a connection between " + host + " and " + store, e);
                 }
             }
         }
@@ -85,7 +87,7 @@ public class BasePrimaryDataStoreLifeCycleImpl {
 
     public void changeStoragePoolScopeToCluster(DataStore store, ClusterScope clusterScope, HypervisorType hypervisorType) {
         Pair<List<StoragePoolHostVO>, Integer> hostPoolRecords = storagePoolHostDao.listByPoolIdNotInCluster(clusterScope.getScopeId(), store.getId());
-        s_logger.debug("Changing scope of the storage pool to Cluster");
+        logger.debug("Changing scope of the storage pool to Cluster");
         if (hostPoolRecords.second() > 0) {
             StoragePool pool = (StoragePool) store;
             for (StoragePoolHostVO host : hostPoolRecords.first()) {
@@ -94,7 +96,7 @@ public class BasePrimaryDataStoreLifeCycleImpl {
 
                 if (answer != null) {
                     if (!answer.getResult()) {
-                        s_logger.debug("Failed to delete storage pool: " + answer.getResult());
+                        logger.debug("Failed to delete storage pool: " + answer.getResult());
                     } else if (HypervisorType.KVM != hypervisorType) {
                         break;
                     }
QuotaResponseBuilderImplTest.java

@@ -334,7 +334,7 @@ public class QuotaResponseBuilderImplTest extends TestCase {
     @Test
     public void validateEndDateOnCreatingNewQuotaTariffTestSetValidEndDate() {
         Date startDate = DateUtils.addDays(date, -100);
-        Date endDate = DateUtils.addMilliseconds(new Date(), 1);
+        Date endDate = DateUtils.addMinutes(new Date(), 1);
 
         quotaResponseBuilderSpy.validateEndDateOnCreatingNewQuotaTariff(quotaTariffVoMock, startDate, endDate);
         Mockito.verify(quotaTariffVoMock).setEndDate(Mockito.any(Date.class));
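This is the flaky-test half of the commit: an end date only one millisecond ahead of `new Date()` can already be in the past by the time `validateEndDateOnCreatingNewQuotaTariff` compares it against the current time, so the verification failed intermittently; a one-minute margin removes the race. A small standalone sketch of the failure mode, assuming `DateUtils` here is the commons-lang3 class the test appears to use:

import java.util.Date;
import org.apache.commons.lang3.time.DateUtils;

public class FlakyEndDateDemo {
    public static void main(String[] args) throws InterruptedException {
        // Flaky: the 1 ms head start can elapse before validation runs.
        Date endDate = DateUtils.addMilliseconds(new Date(), 1);
        Thread.sleep(2); // stand-in for overhead between setup and the check
        System.out.println(endDate.after(new Date())); // usually false -> test fails

        // Stable: a minute of slack dwarfs any plausible test overhead.
        Date saferEndDate = DateUtils.addMinutes(new Date(), 1);
        System.out.println(saferEndDate.after(new Date())); // true
    }
}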
LibvirtCheckConvertInstanceCommandWrapper.java

@@ -18,8 +18,6 @@
 //
 package com.cloud.hypervisor.kvm.resource.wrapper;
 
-import org.apache.log4j.Logger;
-
 import com.cloud.agent.api.Answer;
 import com.cloud.agent.api.CheckConvertInstanceAnswer;
 import com.cloud.agent.api.CheckConvertInstanceCommand;
@@ -30,21 +28,19 @@ import com.cloud.resource.ResourceWrapper;
 @ResourceWrapper(handles = CheckConvertInstanceCommand.class)
 public class LibvirtCheckConvertInstanceCommandWrapper extends CommandWrapper<CheckConvertInstanceCommand, Answer, LibvirtComputingResource> {
 
-    private static final Logger s_logger = Logger.getLogger(LibvirtCheckConvertInstanceCommandWrapper.class);
-
     @Override
     public Answer execute(CheckConvertInstanceCommand cmd, LibvirtComputingResource serverResource) {
         if (!serverResource.hostSupportsInstanceConversion()) {
             String msg = String.format("Cannot convert the instance from VMware as the virt-v2v binary is not found on host %s. " +
                     "Please install virt-v2v%s on the host before attempting the instance conversion.", serverResource.getPrivateIp(), serverResource.isUbuntuHost()? ", nbdkit" : "");
-            s_logger.info(msg);
+            logger.info(msg);
            return new CheckConvertInstanceAnswer(cmd, false, msg);
         }
 
         if (cmd.getCheckWindowsGuestConversionSupport() && !serverResource.hostSupportsWindowsGuestConversion()) {
             String msg = String.format("Cannot convert the instance from VMware as the virtio-win package is not found on host %s. " +
                     "Please install virtio-win package on the host before attempting the windows guest instance conversion.", serverResource.getPrivateIp());
-            s_logger.info(msg);
+            logger.info(msg);
             return new CheckConvertInstanceAnswer(cmd, false, msg);
         }
 
LibvirtPrepareStorageClientCommandWrapper.java

@@ -21,8 +21,6 @@ package com.cloud.hypervisor.kvm.resource.wrapper;
 
 import java.util.Map;
 
-import org.apache.log4j.Logger;
-
 import com.cloud.agent.api.Answer;
 import com.cloud.agent.api.PrepareStorageClientAnswer;
 import com.cloud.agent.api.PrepareStorageClientCommand;
@@ -35,15 +33,13 @@ import com.cloud.utils.Ternary;
 @ResourceWrapper(handles = PrepareStorageClientCommand.class)
 public class LibvirtPrepareStorageClientCommandWrapper extends CommandWrapper<PrepareStorageClientCommand, Answer, LibvirtComputingResource> {
 
-    private static final Logger s_logger = Logger.getLogger(LibvirtPrepareStorageClientCommandWrapper.class);
-
     @Override
     public Answer execute(PrepareStorageClientCommand cmd, LibvirtComputingResource libvirtComputingResource) {
         final KVMStoragePoolManager storagePoolMgr = libvirtComputingResource.getStoragePoolMgr();
         Ternary<Boolean, Map<String, String>, String> prepareStorageClientResult = storagePoolMgr.prepareStorageClient(cmd.getPoolType(), cmd.getPoolUuid(), cmd.getDetails());
         if (!prepareStorageClientResult.first()) {
             String msg = prepareStorageClientResult.third();
-            s_logger.debug("Unable to prepare storage client, due to: " + msg);
+            logger.debug("Unable to prepare storage client, due to: " + msg);
             return new PrepareStorageClientAnswer(cmd, false, msg);
         }
         Map<String, String> details = prepareStorageClientResult.second();
LibvirtUnprepareStorageClientCommandWrapper.java

@@ -19,8 +19,6 @@
 
 package com.cloud.hypervisor.kvm.resource.wrapper;
 
-import org.apache.log4j.Logger;
-
 import com.cloud.agent.api.Answer;
 import com.cloud.agent.api.UnprepareStorageClientAnswer;
 import com.cloud.agent.api.UnprepareStorageClientCommand;
@@ -33,15 +31,13 @@ import com.cloud.utils.Pair;
 @ResourceWrapper(handles = UnprepareStorageClientCommand.class)
 public class LibvirtUnprepareStorageClientCommandWrapper extends CommandWrapper<UnprepareStorageClientCommand, Answer, LibvirtComputingResource> {
 
-    private static final Logger s_logger = Logger.getLogger(LibvirtUnprepareStorageClientCommandWrapper.class);
-
     @Override
     public Answer execute(UnprepareStorageClientCommand cmd, LibvirtComputingResource libvirtComputingResource) {
         final KVMStoragePoolManager storagePoolMgr = libvirtComputingResource.getStoragePoolMgr();
         Pair<Boolean, String> unprepareStorageClientResult = storagePoolMgr.unprepareStorageClient(cmd.getPoolType(), cmd.getPoolUuid());
         if (!unprepareStorageClientResult.first()) {
             String msg = unprepareStorageClientResult.second();
-            s_logger.debug("Couldn't unprepare storage client, due to: " + msg);
+            logger.debug("Couldn't unprepare storage client, due to: " + msg);
             return new UnprepareStorageClientAnswer(cmd, false, msg);
         }
         return new UnprepareStorageClientAnswer(cmd, true);
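Note that the three Libvirt wrappers above do not declare a replacement field: after dropping `s_logger` they log through the `logger` inherited from `CommandWrapper` (they could not compile otherwise). The lifecycle classes that follow get the same treatment against `BasePrimaryDataStoreLifeCycleImpl`, which is why each of the remaining hunks only deletes a now-shadowing `protected Logger logger` field and its `org.apache.logging.log4j` imports.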
AdaptiveDataStoreLifeCycleImpl.java

@@ -57,8 +57,6 @@ import com.cloud.storage.Storage.StoragePoolType;
 import com.cloud.utils.crypt.DBEncryptionUtil;
 import com.cloud.utils.exception.CloudRuntimeException;
 import com.cloud.host.Host;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
 
 /**
  * Manages the lifecycle of a Managed Data Store in CloudStack
@@ -66,8 +64,6 @@ import org.apache.logging.log4j.Logger;
 public class AdaptiveDataStoreLifeCycleImpl extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
     @Inject
     private PrimaryDataStoreDao _storagePoolDao;
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     PrimaryDataStoreHelper _dataStoreHelper;
     @Inject
ElastistorPrimaryDataStoreLifeCycle.java

@@ -26,9 +26,6 @@ import java.util.StringTokenizer;
 
 import javax.inject.Inject;
 
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
-
 import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
 import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
 import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
@@ -67,8 +64,6 @@ import com.cloud.storage.dao.StoragePoolHostDao;
 import com.cloud.utils.exception.CloudRuntimeException;
 
 public class ElastistorPrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     HostDao _hostDao;
     @Inject
DateraPrimaryDataStoreLifeCycle.java

@@ -51,8 +51,6 @@ import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 import org.apache.cloudstack.storage.datastore.util.DateraUtil;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 import javax.inject.Inject;
 import java.util.ArrayList;
@@ -60,8 +58,6 @@ import java.util.List;
 import java.util.Map;
 
 public class DateraPrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     private CapacityManager _capacityMgr;
     @Inject
CloudStackPrimaryDataStoreLifeCycleImpl.java

@@ -64,8 +64,6 @@ import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
 import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 import javax.inject.Inject;
 import java.util.ArrayList;
@@ -74,7 +72,6 @@ import java.util.Map;
 import java.util.UUID;
 
 public class CloudStackPrimaryDataStoreLifeCycleImpl extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
     @Inject
     protected ResourceManager _resourceMgr;
     @Inject
LinstorPrimaryDataStoreLifeCycleImpl.java

@@ -51,12 +51,8 @@ import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 import org.apache.cloudstack.storage.datastore.util.LinstorUtil;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 public class LinstorPrimaryDataStoreLifeCycleImpl extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     private ClusterDao clusterDao;
     @Inject
NexentaPrimaryDataStoreLifeCycle.java

@@ -33,8 +33,6 @@ import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
 import org.apache.cloudstack.storage.datastore.lifecycle.BasePrimaryDataStoreLifeCycleImpl;
 import org.apache.cloudstack.storage.datastore.util.NexentaUtil;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 import com.cloud.agent.api.StoragePoolInfo;
 import com.cloud.dc.DataCenterVO;
@@ -49,7 +47,6 @@ import com.cloud.storage.StoragePoolAutomation;
 public class NexentaPrimaryDataStoreLifeCycle
         extends BasePrimaryDataStoreLifeCycleImpl
         implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
 
     @Inject
     private DataCenterDao zoneDao;
ScaleIOPrimaryDataStoreLifeCycle.java

@@ -47,8 +47,6 @@ import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
 import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 import com.cloud.agent.AgentManager;
 import com.cloud.agent.api.Answer;
@@ -76,8 +74,6 @@ import com.cloud.utils.crypt.DBEncryptionUtil;
 import com.cloud.utils.exception.CloudRuntimeException;
 
 public class ScaleIOPrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     private ClusterDao clusterDao;
     @Inject
ScaleIOSDCManagerImpl.java

@@ -30,7 +30,8 @@ import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClientConnec
 import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
 import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
-import org.apache.log4j.Logger;
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.springframework.stereotype.Component;
 
 import com.cloud.agent.AgentManager;
@@ -51,7 +52,7 @@ import com.cloud.utils.exception.CloudRuntimeException;
 
 @Component
 public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
-    private static final Logger LOGGER = Logger.getLogger(ScaleIOSDCManagerImpl.class);
+    private Logger logger = LogManager.getLogger(getClass());
 
     @Inject
     AgentManager agentManager;
@@ -79,14 +80,14 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
 
             int connectedSdcsCount = getScaleIOClient(storagePoolId).getConnectedSdcsCount();
             if (connectedSdcsCount < connectedClientsLimit) {
-                LOGGER.debug(String.format("Current connected SDCs count: %d - SDC connections are within the limit (%d) on PowerFlex Storage with pool id: %d", connectedSdcsCount, connectedClientsLimit, storagePoolId));
+                logger.debug(String.format("Current connected SDCs count: %d - SDC connections are within the limit (%d) on PowerFlex Storage with pool id: %d", connectedSdcsCount, connectedClientsLimit, storagePoolId));
                 return true;
             }
-            LOGGER.debug(String.format("Current connected SDCs count: %d - SDC connections limit (%d) reached on PowerFlex Storage with pool id: %d", connectedSdcsCount, connectedClientsLimit, storagePoolId));
+            logger.debug(String.format("Current connected SDCs count: %d - SDC connections limit (%d) reached on PowerFlex Storage with pool id: %d", connectedSdcsCount, connectedClientsLimit, storagePoolId));
             return false;
         } catch (Exception e) {
             String errMsg = "Unable to check SDC connections for the PowerFlex storage pool with id: " + storagePoolId + " due to " + e.getMessage();
-            LOGGER.warn(errMsg, e);
+            logger.warn(errMsg, e);
             return false;
         }
     }
@@ -109,7 +110,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
 
         int storagePoolMaxWaitSeconds = NumbersUtil.parseInt(configDao.getValue(Config.StoragePoolMaxWaitSeconds.key()), 3600);
         if (!hostIdStorageSystemIdLock.lock(storagePoolMaxWaitSeconds)) {
-            LOGGER.debug("Unable to prepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
+            logger.debug("Unable to prepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
             throw new CloudRuntimeException("Unable to prepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
         }
 
@@ -117,25 +118,25 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
             long hostId = host.getId();
             String sdcId = getConnectedSdc(poolId, hostId);
             if (StringUtils.isNotBlank(sdcId)) {
-                LOGGER.debug(String.format("SDC %s already connected for the pool: %d on host: %d, no need to prepare/start it", sdcId, poolId, hostId));
+                logger.debug(String.format("SDC %s already connected for the pool: %d on host: %d, no need to prepare/start it", sdcId, poolId, hostId));
                 return sdcId;
             }
 
             String storageSystemIdLockString = String.format(POWERFLEX_SDC_SYSTEMID_LOCK_FORMAT, systemId);
             storageSystemIdLock = GlobalLock.getInternLock(storageSystemIdLockString);
             if (storageSystemIdLock == null) {
-                LOGGER.error("Unable to prepare SDC, couldn't get global lock on: " + storageSystemIdLockString);
+                logger.error("Unable to prepare SDC, couldn't get global lock on: " + storageSystemIdLockString);
                 throw new CloudRuntimeException("Unable to prepare SDC, couldn't get global lock on " + storageSystemIdLockString);
             }
 
             if (!storageSystemIdLock.lock(storagePoolMaxWaitSeconds)) {
-                LOGGER.error("Unable to prepare SDC, couldn't lock on " + storageSystemIdLockString);
+                logger.error("Unable to prepare SDC, couldn't lock on " + storageSystemIdLockString);
                 throw new CloudRuntimeException("Unable to prepare SDC, couldn't lock on " + storageSystemIdLockString);
             }
 
             if (!areSDCConnectionsWithinLimit(poolId)) {
                 String errorMsg = String.format("Unable to check SDC connections or the connections limit reached for Powerflex storage (System ID: %s)", systemId);
-                LOGGER.error(errorMsg);
+                logger.error(errorMsg);
                 throw new CloudRuntimeException(errorMsg);
             }
 
@@ -174,7 +175,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
     }
 
     private String prepareSDCOnHost(Host host, DataStore dataStore, String systemId) {
-        LOGGER.debug(String.format("Preparing SDC on the host %s (%s)", host.getId(), host.getName()));
+        logger.debug(String.format("Preparing SDC on the host %s (%s)", host.getId(), host.getName()));
         Map<String,String> details = new HashMap<>();
         details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, systemId);
         PrepareStorageClientCommand cmd = new PrepareStorageClientCommand(((PrimaryDataStore) dataStore).getPoolType(), dataStore.getUuid(), details);
@@ -186,25 +187,25 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
             prepareStorageClientAnswer = (PrepareStorageClientAnswer) agentManager.send(host.getId(), cmd);
         } catch (AgentUnavailableException | OperationTimedoutException e) {
             String err = String.format("Failed to prepare SDC on the host %s, due to: %s", host.getName(), e.getMessage());
-            LOGGER.error(err);
+            logger.error(err);
             throw new CloudRuntimeException(err);
         }
 
         if (prepareStorageClientAnswer == null) {
             String err = String.format("Unable to prepare SDC on the host %s", host.getName());
-            LOGGER.error(err);
+            logger.error(err);
             throw new CloudRuntimeException(err);
         }
 
         if (!prepareStorageClientAnswer.getResult()) {
             String err = String.format("Unable to prepare SDC on the host %s, due to: %s", host.getName(), prepareStorageClientAnswer.getDetails());
-            LOGGER.error(err);
+            logger.error(err);
             throw new CloudRuntimeException(err);
         }
 
         Map<String,String> poolDetails = prepareStorageClientAnswer.getDetailsMap();
         if (MapUtils.isEmpty(poolDetails)) {
-            LOGGER.warn(String.format("PowerFlex storage SDC details not found on the host: %s, try (re)install SDC and restart agent", host.getId()));
+            logger.warn(String.format("PowerFlex storage SDC details not found on the host: %s, try (re)install SDC and restart agent", host.getId()));
             return null;
         }
 
@@ -217,7 +218,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
         }
 
         if (StringUtils.isBlank(sdcId)) {
-            LOGGER.warn(String.format("Couldn't retrieve PowerFlex storage SDC details from the host: %s, try (re)install SDC and restart agent", host.getId()));
+            logger.warn(String.format("Couldn't retrieve PowerFlex storage SDC details from the host: %s, try (re)install SDC and restart agent", host.getId()));
             return null;
         }
 
@@ -241,7 +242,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
 
         int storagePoolMaxWaitSeconds = NumbersUtil.parseInt(configDao.getValue(Config.StoragePoolMaxWaitSeconds.key()), 3600);
         if (!lock.lock(storagePoolMaxWaitSeconds)) {
-            LOGGER.debug("Unable to unprepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
+            logger.debug("Unable to unprepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
             throw new CloudRuntimeException("Unable to unprepare SDC, couldn't lock on " + hostIdStorageSystemIdLockString);
         }
 
@@ -249,7 +250,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
             long hostId = host.getId();
             String sdcId = getConnectedSdc(poolId, hostId);
             if (StringUtils.isBlank(sdcId)) {
-                LOGGER.debug("SDC not connected, no need to unprepare it");
+                logger.debug("SDC not connected, no need to unprepare it");
                 return true;
             }
 
@@ -263,7 +264,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
     }
 
     private boolean unprepareSDCOnHost(Host host, DataStore dataStore) {
-        LOGGER.debug(String.format("Unpreparing SDC on the host %s (%s)", host.getId(), host.getName()));
+        logger.debug(String.format("Unpreparing SDC on the host %s (%s)", host.getId(), host.getName()));
         UnprepareStorageClientCommand cmd = new UnprepareStorageClientCommand(((PrimaryDataStore) dataStore).getPoolType(), dataStore.getUuid());
         int timeoutSeconds = 60;
         cmd.setWait(timeoutSeconds);
@@ -273,13 +274,13 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
             unprepareStorageClientAnswer = agentManager.send(host.getId(), cmd);
         } catch (AgentUnavailableException | OperationTimedoutException e) {
             String err = String.format("Failed to unprepare SDC on the host %s due to: %s", host.getName(), e.getMessage());
-            LOGGER.error(err);
+            logger.error(err);
             return false;
         }
 
         if (!unprepareStorageClientAnswer.getResult()) {
             String err = String.format("Unable to unprepare SDC on the the host %s due to: %s", host.getName(), unprepareStorageClientAnswer.getDetails());
-            LOGGER.error(err);
+            logger.error(err);
             return false;
         }
         return true;
@@ -287,11 +288,11 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
 
     private String getHostSdcId(String sdcGuid, long poolId) {
         try {
-            LOGGER.debug(String.format("Try to get host SDC Id for pool: %s, with SDC guid %s", poolId, sdcGuid));
+            logger.debug(String.format("Try to get host SDC Id for pool: %s, with SDC guid %s", poolId, sdcGuid));
             ScaleIOGatewayClient client = getScaleIOClient(poolId);
             return client.getSdcIdByGuid(sdcGuid);
         } catch (Exception e) {
-            LOGGER.error(String.format("Failed to get host SDC Id for pool: %s", poolId), e);
+            logger.error(String.format("Failed to get host SDC Id for pool: %s", poolId), e);
             throw new CloudRuntimeException(String.format("Failed to establish connection with PowerFlex Gateway to get host SDC Id for pool: %s", poolId));
         }
     }
@@ -308,14 +309,14 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
                 return poolHostVO.getLocalPath();
             }
         } catch (Exception e) {
-            LOGGER.warn("Unable to get connected SDC for the host: " + hostId + " and storage pool: " + poolId + " due to " + e.getMessage(), e);
+            logger.warn("Unable to get connected SDC for the host: " + hostId + " and storage pool: " + poolId + " due to " + e.getMessage(), e);
        }
 
         return null;
     }
 
     private boolean hostSdcConnected(String sdcId, long poolId, int waitTimeInSecs) {
-        LOGGER.debug(String.format("Waiting (for %d secs) for the SDC %s of the pool id: %d to connect", waitTimeInSecs, sdcId, poolId));
+        logger.debug(String.format("Waiting (for %d secs) for the SDC %s of the pool id: %d to connect", waitTimeInSecs, sdcId, poolId));
         int timeBetweenTries = 1000; // Try more frequently (every sec) and return early if connected
         while (waitTimeInSecs > 0) {
             if (isHostSdcConnected(sdcId, poolId)) {
@@ -335,7 +336,7 @@ public class ScaleIOSDCManagerImpl implements ScaleIOSDCManager {
             final ScaleIOGatewayClient client = getScaleIOClient(poolId);
             return client.isSdcConnected(sdcId);
         } catch (Exception e) {
-            LOGGER.error("Failed to check host SDC connection", e);
+            logger.error("Failed to check host SDC connection", e);
             throw new CloudRuntimeException("Failed to establish connection with PowerFlex Gateway to check host SDC connection");
         }
     }
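One pattern worth calling out in the ScaleIOSDCManagerImpl hunks above: SDC prepare/unprepare is serialized with CloudStack's `com.cloud.utils.db.GlobalLock`, acquired with a bounded wait and failed loudly when the lock cannot be taken. A hedged sketch of that shape; the lock-name format and timeout below are illustrative, and the real method also nests a second per-storage-system lock:

import com.cloud.utils.db.GlobalLock;
import com.cloud.utils.exception.CloudRuntimeException;

public class SdcLockSketch {
    public void withHostSystemLock(long hostId, String systemId, Runnable work) {
        // Interned by name: callers passing the same key share one lock object.
        GlobalLock lock = GlobalLock.getInternLock(String.format("SdcPrepare-Host%d-System%s", hostId, systemId));
        if (!lock.lock(3600)) { // bounded wait, like StoragePoolMaxWaitSeconds in the diff
            throw new CloudRuntimeException("Unable to prepare SDC, couldn't lock");
        }
        try {
            work.run();
        } finally {
            lock.unlock();
            lock.releaseRef(); // drop the interned reference when done
        }
    }
}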
SolidFirePrimaryDataStoreLifeCycle.java

@@ -25,9 +25,6 @@ import java.util.UUID;
 
 import javax.inject.Inject;
 
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
-
 import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
 import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
 import org.apache.cloudstack.engine.subsystem.api.storage.HostScope;
@@ -65,8 +62,6 @@ import com.cloud.utils.exception.CloudRuntimeException;
 import com.google.common.base.Preconditions;
 
 public class SolidFirePrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject private CapacityManager _capacityMgr;
     @Inject private ClusterDao _clusterDao;
     @Inject private DataCenterDao _zoneDao;
SolidFireSharedPrimaryDataStoreLifeCycle.java

@@ -26,9 +26,6 @@ import java.util.Map;
 
 import javax.inject.Inject;
 
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
-
 import org.apache.cloudstack.context.CallContext;
 import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
 import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
@@ -74,8 +71,6 @@ import com.cloud.utils.db.GlobalLock;
 import com.cloud.utils.exception.CloudRuntimeException;
 
 public class SolidFireSharedPrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject private AccountDao accountDao;
     @Inject private AccountDetailsDao accountDetailsDao;
     @Inject private AgentManager agentMgr;
StorPoolPrimaryDataStoreLifeCycle.java

@@ -38,8 +38,6 @@ import org.apache.cloudstack.storage.datastore.util.StorPoolUtil;
 import org.apache.cloudstack.storage.datastore.util.StorPoolUtil.SpApiResponse;
 import org.apache.cloudstack.storage.datastore.util.StorPoolUtil.SpConnectionDesc;
 import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
-import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.LogManager;
 
 import com.cloud.agent.api.StoragePoolInfo;
 import com.cloud.host.HostVO;
@@ -62,8 +60,6 @@ import com.cloud.storage.dao.VMTemplatePoolDao;
 import com.cloud.utils.exception.CloudRuntimeException;
 
 public class StorPoolPrimaryDataStoreLifeCycle extends BasePrimaryDataStoreLifeCycleImpl implements PrimaryDataStoreLifeCycle {
-    protected Logger logger = LogManager.getLogger(getClass());
-
     @Inject
     protected PrimaryDataStoreHelper dataStoreHelper;
     @Inject