Mirror of https://github.com/apache/cloudstack.git
kvm: Updated PowerFlex/ScaleIO storage plugin to support separate (storage) network for Hosts(KVM)/Storage connection. (#6367)
This PR enhances the existing PowerFlex/ScaleIO storage plugin to support a separate (storage) network for the Hosts (KVM)/Storage connection, mainly the SDC (ScaleIO Data Client) connection.
This commit is contained in:
parent 14c5250267
commit c70bc9d69c
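The core of the change: instead of matching a host to its SDC by the host's private IP address (which breaks when the SDC talks to the PowerFlex MDMs over a dedicated storage network), the KVM agent now identifies the local SDC itself and reports it back in the ModifyStoragePoolAnswer pool details, keyed by the pool's PowerFlex system ID. The sketch below condenses the agent-side lookup added in this PR (ScaleIOStoragePool.addSDCDetails() plus the new ScaleIOUtil helpers); it is an illustrative sketch only, not the literal patched class, and it assumes the default SDC install path (/opt/emc/scaleio/sdc) used by ScaleIOUtil.

```java
// Illustrative sketch, condensed from ScaleIOStoragePool.addSDCDetails() and ScaleIOUtil in this PR.
import java.util.Map;

import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
import org.apache.cloudstack.storage.datastore.util.ScaleIOUtil;

public class SdcDetailsSketch {
    /**
     * Resolve the local SDC identity for the pool's PowerFlex system (MDM) id and
     * record it in the pool details that travel back in ModifyStoragePoolAnswer.
     */
    static void addSdcDetails(Map<String, String> details) {
        if (details == null || !details.containsKey(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID)) {
            return;
        }
        String systemId = details.get(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID);

        // Preferred: ask drv_cfg for the SDC id registered against this MDM/system id.
        String sdcId = ScaleIOUtil.getSdcId(systemId);
        if (sdcId != null) {
            details.put(ScaleIOGatewayClient.SDC_ID, sdcId);
            return;
        }

        // Fallback: report the SDC guid; the management server maps it to an SDC id
        // through the PowerFlex Gateway (ScaleIOGatewayClient.getSdcIdByGuid).
        String sdcGuid = ScaleIOUtil.getSdcGuid();
        if (sdcGuid != null) {
            details.put(ScaleIOGatewayClient.SDC_GUID, sdcGuid);
        }
    }
}
```

On the management-server side, ScaleIOHostListener stores the resolved SDC id in storage_pool_host_ref (localPath), and ScaleIOPrimaryDataStoreDriver then grants/revokes volume access by that SDC id rather than by host IP, as the diff below shows.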
@@ -30,14 +30,18 @@ public class ModifyStoragePoolAnswer extends Answer {
     private Map<String, TemplateProp> templateInfo;
     private String localDatastoreName;
     private String poolType;
-    private List<ModifyStoragePoolAnswer> datastoreClusterChildren = new ArrayList<>();;
+    private List<ModifyStoragePoolAnswer> datastoreClusterChildren = new ArrayList<>();
 
     public ModifyStoragePoolAnswer(ModifyStoragePoolCommand cmd, long capacityBytes, long availableBytes, Map<String, TemplateProp> tInfo) {
+        this(cmd, capacityBytes, availableBytes, tInfo, null);
+    }
+
+    public ModifyStoragePoolAnswer(ModifyStoragePoolCommand cmd, long capacityBytes, long availableBytes, Map<String, TemplateProp> tInfo, Map<String, String> details) {
         super(cmd);
 
         result = true;
 
-        poolInfo = new StoragePoolInfo(null, cmd.getPool().getHost(), cmd.getPool().getPath(), cmd.getLocalPath(), cmd.getPool().getType(), capacityBytes, availableBytes);
+        poolInfo = new StoragePoolInfo(null, cmd.getPool().getHost(), cmd.getPool().getPath(), cmd.getLocalPath(), cmd.getPool().getType(), capacityBytes, availableBytes, details);
 
         templateInfo = tInfo;
     }
@@ -20,6 +20,7 @@
 package com.cloud.agent.api;
 
 import java.io.File;
+import java.util.Map;
 import java.util.UUID;
 
 import com.cloud.agent.api.to.StorageFilerTO;
@@ -32,6 +33,7 @@ public class ModifyStoragePoolCommand extends Command {
     private StorageFilerTO pool;
     private String localPath;
    private String storagePath;
+    private Map<String, String> details;
 
     public ModifyStoragePoolCommand(boolean add, StoragePool pool, String localPath) {
         this.add = add;
@@ -39,6 +41,11 @@ public class ModifyStoragePoolCommand extends Command {
         this.localPath = localPath;
     }
 
+    public ModifyStoragePoolCommand(boolean add, StoragePool pool, String localPath, Map<String, String> details) {
+        this(add, pool, localPath);
+        this.details = details;
+    }
+
     public ModifyStoragePoolCommand(boolean add, StoragePool pool) {
         this(add, pool, LOCAL_PATH_PREFIX + File.separator + UUID.nameUUIDFromBytes((pool.getHostAddress() + pool.getPath()).getBytes()));
     }
@@ -67,6 +74,14 @@ public class ModifyStoragePoolCommand extends Command {
         return storagePath;
     }
 
+    public void setDetails(Map<String, String> details) {
+        this.details = details;
+    }
+
+    public Map<String, String> getDetails() {
+        return details;
+    }
+
     @Override
     public boolean executeInSequence() {
         return false;
@@ -276,7 +276,7 @@ public interface StorageManager extends StorageService {
 
     boolean registerHostListener(String providerUuid, HypervisorHostListener listener);
 
-    void connectHostToSharedPool(long hostId, long poolId) throws StorageUnavailableException, StorageConflictException;
+    boolean connectHostToSharedPool(long hostId, long poolId) throws StorageUnavailableException, StorageConflictException;
 
     void disconnectHostFromSharedPool(long hostId, long poolId) throws StorageUnavailableException, StorageConflictException;
 
@@ -40,13 +40,13 @@ public final class LibvirtModifyStoragePoolCommandWrapper extends CommandWrapper
         final KVMStoragePoolManager storagePoolMgr = libvirtComputingResource.getStoragePoolMgr();
         final KVMStoragePool storagepool =
             storagePoolMgr.createStoragePool(command.getPool().getUuid(), command.getPool().getHost(), command.getPool().getPort(), command.getPool().getPath(), command.getPool()
-                .getUserInfo(), command.getPool().getType());
+                .getUserInfo(), command.getPool().getType(), command.getDetails());
         if (storagepool == null) {
             return new Answer(command, false, " Failed to create storage pool");
         }
 
         final Map<String, TemplateProp> tInfo = new HashMap<String, TemplateProp>();
-        final ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(command, storagepool.getCapacity(), storagepool.getAvailable(), tInfo);
+        final ModifyStoragePoolAnswer answer = new ModifyStoragePoolAnswer(command, storagepool.getCapacity(), storagepool.getAvailable(), tInfo, storagepool.getDetails());
 
         return answer;
     }
@@ -44,7 +44,7 @@ public class IscsiAdmStorageAdaptor implements StorageAdaptor {
     private static final Map<String, KVMStoragePool> MapStorageUuidToStoragePool = new HashMap<>();
 
     @Override
-    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType) {
+    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType, Map<String, String> details) {
         IscsiAdmStoragePool storagePool = new IscsiAdmStoragePool(uuid, host, port, storagePoolType, this);
 
         MapStorageUuidToStoragePool.put(uuid, storagePool);
@@ -173,6 +173,11 @@ public class IscsiAdmStoragePool implements KVMStoragePool {
         return false;
     }
 
+    @Override
+    public Map<String, String> getDetails() {
+        return null;
+    }
+
     @Override
     public String toString() {
         return new ToStringBuilder(this, ToStringStyle.JSON_STYLE).append("uuid", getUuid()).append("path", getLocalPath()).toString();
@@ -72,4 +72,6 @@ public interface KVMStoragePool {
     public boolean createFolder(String path);
 
     public boolean supportsConfigDriveIso();
+
+    public Map<String, String> getDetails();
 }
@@ -56,8 +56,9 @@ public class KVMStoragePoolManager {
         String userInfo;
         boolean type;
         StoragePoolType poolType;
+        Map<String, String> details;
 
-        public StoragePoolInformation(String name, String host, int port, String path, String userInfo, StoragePoolType poolType, boolean type) {
+        public StoragePoolInformation(String name, String host, int port, String path, String userInfo, StoragePoolType poolType, Map<String, String> details, boolean type) {
             this.name = name;
             this.host = host;
             this.port = port;
@@ -65,6 +66,7 @@ public class KVMStoragePoolManager {
             this.userInfo = userInfo;
             this.type = type;
             this.poolType = poolType;
+            this.details = details;
         }
     }
 
@@ -270,7 +272,7 @@ public class KVMStoragePoolManager {
         } catch (Exception e) {
             StoragePoolInformation info = _storagePools.get(uuid);
             if (info != null) {
-                pool = createStoragePool(info.name, info.host, info.port, info.path, info.userInfo, info.poolType, info.type);
+                pool = createStoragePool(info.name, info.host, info.port, info.path, info.userInfo, info.poolType, info.details, info.type);
             } else {
                 throw new CloudRuntimeException("Could not fetch storage pool " + uuid + " from libvirt due to " + e.getMessage());
             }
@@ -300,7 +302,7 @@ public class KVMStoragePoolManager {
         }
 
         // secondary storage registers itself through here
-        return createStoragePool(uuid, sourceHost, 0, sourcePath, "", protocol, false);
+        return createStoragePool(uuid, sourceHost, 0, sourcePath, "", protocol, null, false);
     }
 
     public KVMPhysicalDisk getPhysicalDisk(StoragePoolType type, String poolUuid, String volName) {
@@ -341,20 +343,27 @@ public class KVMStoragePoolManager {
 
     public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type) {
         // primary storage registers itself through here
-        return createStoragePool(name, host, port, path, userInfo, type, true);
+        return createStoragePool(name, host, port, path, userInfo, type, null, true);
     }
 
+    /**
+     * Primary Storage registers itself through here
+     */
+    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type, Map<String, String> details) {
+        return createStoragePool(name, host, port, path, userInfo, type, details, true);
+    }
+
     //Note: due to bug CLOUDSTACK-4459, createStoragepool can be called in parallel, so need to be synced.
-    private synchronized KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type, boolean primaryStorage) {
+    private synchronized KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type, Map<String, String> details, boolean primaryStorage) {
         StorageAdaptor adaptor = getStorageAdaptor(type);
-        KVMStoragePool pool = adaptor.createStoragePool(name, host, port, path, userInfo, type);
+        KVMStoragePool pool = adaptor.createStoragePool(name, host, port, path, userInfo, type, details);
 
         // LibvirtStorageAdaptor-specific statement
         if (type == StoragePoolType.NetworkFilesystem && primaryStorage) {
             KVMHABase.NfsStoragePool nfspool = new KVMHABase.NfsStoragePool(pool.getUuid(), host, path, pool.getLocalPath(), PoolType.PrimaryStorage);
             _haMonitor.addStoragePool(nfspool);
         }
-        StoragePoolInformation info = new StoragePoolInformation(name, host, port, path, userInfo, type, primaryStorage);
+        StoragePoolInformation info = new StoragePoolInformation(name, host, port, path, userInfo, type, details, primaryStorage);
         addStoragePool(pool.getUuid(), info);
         return pool;
     }
@@ -576,7 +576,7 @@ public class LibvirtStorageAdaptor implements StorageAdaptor {
     }
 
     @Override
-    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type) {
+    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type, Map<String, String> details) {
         s_logger.info("Attempting to create storage pool " + name + " (" + type.toString() + ") in libvirt");
 
         StoragePool sp = null;
@@ -282,6 +282,11 @@ public class LibvirtStoragePool implements KVMStoragePool {
         return false;
     }
 
+    @Override
+    public Map<String, String> getDetails() {
+        return null;
+    }
+
     @Override
     public String toString() {
         return new ToStringBuilder(this, ToStringStyle.JSON_STYLE).append("uuid", getUuid()).append("path", getLocalPath()).toString();
@@ -174,7 +174,7 @@ public class LinstorStorageAdaptor implements StorageAdaptor {
 
     @Override
     public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo,
-                                            Storage.StoragePoolType type)
+                                            Storage.StoragePoolType type, Map<String, String> details)
     {
         s_logger.debug(String.format(
             "Linstor createStoragePool: name: '%s', host: '%s', path: %s, userinfo: %s", name, host, path, userInfo));
@@ -185,6 +185,11 @@ public class LinstorStoragePool implements KVMStoragePool {
         return false;
     }
 
+    @Override
+    public Map<String, String> getDetails() {
+        return null;
+    }
+
     public String getResourceGroup() {
         return _resourceGroup;
     }
@@ -55,7 +55,7 @@ public class ManagedNfsStorageAdaptor implements StorageAdaptor {
     }
 
     @Override
-    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType) {
+    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType, Map<String, String> details) {
 
         LibvirtStoragePool storagePool = new LibvirtStoragePool(uuid, path, StoragePoolType.ManagedNFS, this, null);
         storagePool.setSourceHost(host);
@@ -117,8 +117,8 @@ public class ScaleIOStorageAdaptor implements StorageAdaptor {
     }
 
     @Override
-    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, Storage.StoragePoolType type) {
-        ScaleIOStoragePool storagePool = new ScaleIOStoragePool(uuid, host, port, path, type, this);
+    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, Storage.StoragePoolType type, Map<String, String> details) {
+        ScaleIOStoragePool storagePool = new ScaleIOStoragePool(uuid, host, port, path, type, details, this);
         MapStorageUuidToStoragePool.put(uuid, storagePool);
         return storagePool;
     }
@@ -20,6 +20,8 @@ package com.cloud.hypervisor.kvm.storage;
 import java.util.List;
+import java.util.Map;
 
+import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
 import org.apache.cloudstack.storage.datastore.util.ScaleIOUtil;
 import org.apache.cloudstack.utils.qemu.QemuImg;
 
 import com.cloud.storage.Storage;
@@ -34,8 +36,9 @@ public class ScaleIOStoragePool implements KVMStoragePool {
     private long capacity;
     private long used;
     private long available;
+    private Map<String, String> details;
 
-    public ScaleIOStoragePool(String uuid, String host, int port, String path, Storage.StoragePoolType poolType, StorageAdaptor adaptor) {
+    public ScaleIOStoragePool(String uuid, String host, int port, String path, Storage.StoragePoolType poolType, Map<String, String> poolDetails, StorageAdaptor adaptor) {
         this.uuid = uuid;
         sourceHost = host;
         sourcePort = port;
@@ -45,6 +48,25 @@ public class ScaleIOStoragePool implements KVMStoragePool {
         capacity = 0;
         used = 0;
         available = 0;
+        details = poolDetails;
+        addSDCDetails();
     }
 
+    private void addSDCDetails() {
+        if (details == null || !details.containsKey(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID)) {
+            return;
+        }
+
+        String storageSystemId = details.get(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID);
+        String sdcId = ScaleIOUtil.getSdcId(storageSystemId);
+        if (sdcId != null) {
+            details.put(ScaleIOGatewayClient.SDC_ID, sdcId);
+        } else {
+            String sdcGuId = ScaleIOUtil.getSdcGuid();
+            if (sdcGuId != null) {
+                details.put(ScaleIOGatewayClient.SDC_GUID, sdcGuId);
+            }
+        }
+    }
+
     @Override
@@ -178,4 +200,9 @@ public class ScaleIOStoragePool implements KVMStoragePool {
     public boolean supportsConfigDriveIso() {
         return false;
     }
+
+    @Override
+    public Map<String, String> getDetails() {
+        return this.details;
+    }
 }
@@ -35,7 +35,7 @@ public interface StorageAdaptor {
     // it with info from local disk, and return it
     public KVMPhysicalDisk getPhysicalDisk(String volumeUuid, KVMStoragePool pool);
 
-    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type);
+    public KVMStoragePool createStoragePool(String name, String host, int port, String path, String userInfo, StoragePoolType type, Map<String, String> details);
 
     public boolean deleteStoragePool(String uuid);
 
@@ -2765,10 +2765,9 @@ public class LibvirtComputingResourceTest {
         final KVMStoragePoolManager storagePoolMgr = Mockito.mock(KVMStoragePoolManager.class);
         final KVMStoragePool kvmStoragePool = Mockito.mock(KVMStoragePool.class);
 
         when(libvirtComputingResource.getStoragePoolMgr()).thenReturn(storagePoolMgr);
         when(storagePoolMgr.createStoragePool(command.getPool().getUuid(), command.getPool().getHost(), command.getPool().getPort(), command.getPool().getPath(), command.getPool()
-                .getUserInfo(), command.getPool().getType())).thenReturn(kvmStoragePool);
+                .getUserInfo(), command.getPool().getType(), command.getDetails())).thenReturn(kvmStoragePool);
 
         final LibvirtRequestWrapper wrapper = LibvirtRequestWrapper.getInstance();
@@ -2779,7 +2778,7 @@ public class LibvirtComputingResourceTest {
 
         verify(libvirtComputingResource, times(1)).getStoragePoolMgr();
         verify(storagePoolMgr, times(1)).createStoragePool(command.getPool().getUuid(), command.getPool().getHost(), command.getPool().getPort(), command.getPool().getPath(), command.getPool()
-                .getUserInfo(), command.getPool().getType());
+                .getUserInfo(), command.getPool().getType(), command.getDetails());
     }
 
     @Test
@@ -2791,7 +2790,7 @@ public class LibvirtComputingResourceTest {
 
         when(libvirtComputingResource.getStoragePoolMgr()).thenReturn(storagePoolMgr);
         when(storagePoolMgr.createStoragePool(command.getPool().getUuid(), command.getPool().getHost(), command.getPool().getPort(), command.getPool().getPath(), command.getPool()
-                .getUserInfo(), command.getPool().getType())).thenReturn(null);
+                .getUserInfo(), command.getPool().getType(), command.getDetails())).thenReturn(null);
 
         final LibvirtRequestWrapper wrapper = LibvirtRequestWrapper.getInstance();
@@ -2802,7 +2801,7 @@ public class LibvirtComputingResourceTest {
 
         verify(libvirtComputingResource, times(1)).getStoragePoolMgr();
         verify(storagePoolMgr, times(1)).createStoragePool(command.getPool().getUuid(), command.getPool().getHost(), command.getPool().getPort(), command.getPool().getPath(), command.getPool()
-                .getUserInfo(), command.getPool().getType());
+                .getUserInfo(), command.getPool().getType(), command.getDetails());
     }
 
     @Test
@@ -26,7 +26,10 @@ import static org.mockito.Mockito.when;
 import java.io.File;
 import java.io.FileFilter;
+import java.util.HashMap;
+import java.util.Map;
 
+import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
 import org.apache.cloudstack.storage.datastore.util.ScaleIOUtil;
 import org.apache.cloudstack.utils.qemu.QemuImg;
 import org.apache.cloudstack.utils.qemu.QemuImg.PhysicalDiskFormat;
@@ -42,8 +45,9 @@ import org.powermock.modules.junit4.PowerMockRunner;
 
 import com.cloud.storage.Storage.StoragePoolType;
 import com.cloud.storage.StorageLayer;
+import com.cloud.utils.script.Script;
 
-@PrepareForTest(ScaleIOUtil.class)
+@PrepareForTest({ScaleIOUtil.class, Script.class})
 @RunWith(PowerMockRunner.class)
 public class ScaleIOStoragePoolTest {
 
@@ -57,10 +61,13 @@ public class ScaleIOStoragePoolTest {
     @Before
     public void setUp() throws Exception {
         final String uuid = "345fc603-2d7e-47d2-b719-a0110b3732e6";
+        final String systemId = "218ce1797566a00f";
         final StoragePoolType type = StoragePoolType.PowerFlex;
+        Map<String,String> details = new HashMap<String, String>();
+        details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, systemId);
 
         adapter = spy(new ScaleIOStorageAdaptor(storageLayer));
-        pool = new ScaleIOStoragePool(uuid, "192.168.1.19", 443, "a519be2f00000000", type, adapter);
+        pool = new ScaleIOStoragePool(uuid, "192.168.1.19", 443, "a519be2f00000000", type, details, adapter);
     }
 
     @After
@@ -69,28 +76,64 @@ public class ScaleIOStoragePoolTest {
 
     @Test
     public void testAttributes() {
-        assertEquals(pool.getCapacity(), 0);
-        assertEquals(pool.getUsed(), 0);
-        assertEquals(pool.getAvailable(), 0);
-        assertEquals(pool.getUuid(), "345fc603-2d7e-47d2-b719-a0110b3732e6");
-        assertEquals(pool.getSourceHost(), "192.168.1.19");
-        assertEquals(pool.getSourcePort(), 443);
-        assertEquals(pool.getSourceDir(), "a519be2f00000000");
-        assertEquals(pool.getType(), StoragePoolType.PowerFlex);
+        assertEquals(0, pool.getCapacity());
+        assertEquals(0, pool.getUsed());
+        assertEquals(0, pool.getAvailable());
+        assertEquals("345fc603-2d7e-47d2-b719-a0110b3732e6", pool.getUuid());
+        assertEquals("192.168.1.19", pool.getSourceHost());
+        assertEquals(443, pool.getSourcePort());
+        assertEquals("a519be2f00000000", pool.getSourceDir());
+        assertEquals(StoragePoolType.PowerFlex, pool.getType());
+        assertEquals("218ce1797566a00f", pool.getDetails().get(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID));
 
         pool.setCapacity(131072);
         pool.setUsed(24576);
         pool.setAvailable(106496);
 
-        assertEquals(pool.getCapacity(), 131072);
-        assertEquals(pool.getUsed(), 24576);
-        assertEquals(pool.getAvailable(), 106496);
+        assertEquals(131072, pool.getCapacity());
+        assertEquals(24576, pool.getUsed());
+        assertEquals(106496, pool.getAvailable());
     }
 
+    @Test
+    public void testSdcIdAttribute() {
+        final String uuid = "345fc603-2d7e-47d2-b719-a0110b3732e6";
+        final String systemId = "218ce1797566a00f";
+        final String sdcId = "301b852c00000003";
+        final StoragePoolType type = StoragePoolType.PowerFlex;
+        Map<String,String> details = new HashMap<String, String>();
+        details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, systemId);
+
+        PowerMockito.mockStatic(Script.class);
+        when(Script.runSimpleBashScript("/opt/emc/scaleio/sdc/bin/drv_cfg --query_mdms|grep 218ce1797566a00f|awk '{print $5}'")).thenReturn(sdcId);
+
+        ScaleIOStoragePool pool1 = new ScaleIOStoragePool(uuid, "192.168.1.19", 443, "a519be2f00000000", type, details, adapter);
+        assertEquals(systemId, pool1.getDetails().get(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID));
+        assertEquals(sdcId, pool1.getDetails().get(ScaleIOGatewayClient.SDC_ID));
+    }
+
+    @Test
+    public void testSdcGuidAttribute() {
+        final String uuid = "345fc603-2d7e-47d2-b719-a0110b3732e6";
+        final String systemId = "218ce1797566a00f";
+        final String sdcGuid = "B0E3BFB8-C20B-43BF-93C8-13339E85AA50";
+        final StoragePoolType type = StoragePoolType.PowerFlex;
+        Map<String,String> details = new HashMap<String, String>();
+        details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, systemId);
+
+        PowerMockito.mockStatic(Script.class);
+        when(Script.runSimpleBashScript("/opt/emc/scaleio/sdc/bin/drv_cfg --query_mdms|grep 218ce1797566a00f|awk '{print $5}'")).thenReturn(null);
+        when(Script.runSimpleBashScript("/opt/emc/scaleio/sdc/bin/drv_cfg --query_guid")).thenReturn(sdcGuid);
+
+        ScaleIOStoragePool pool1 = new ScaleIOStoragePool(uuid, "192.168.1.19", 443, "a519be2f00000000", type, details, adapter);
+        assertEquals(systemId, pool1.getDetails().get(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID));
+        assertEquals(sdcGuid, pool1.getDetails().get(ScaleIOGatewayClient.SDC_GUID));
+    }
+
     @Test
     public void testDefaults() {
-        assertEquals(pool.getDefaultFormat(), PhysicalDiskFormat.RAW);
-        assertEquals(pool.getType(), StoragePoolType.PowerFlex);
+        assertEquals(PhysicalDiskFormat.RAW, pool.getDefaultFormat());
+        assertEquals(StoragePoolType.PowerFlex, pool.getType());
 
         assertNull(pool.getAuthUserName());
         assertNull(pool.getAuthSecret());
@@ -145,7 +188,7 @@ public class ScaleIOStoragePoolTest {
         disk.setSize(8192);
         disk.setVirtualSize(8192);
 
-        assertEquals(disk.getPath(), "/dev/disk/by-id/emc-vol-218ce1797566a00f-6c3362b500000001");
+        assertEquals("/dev/disk/by-id/emc-vol-218ce1797566a00f-6c3362b500000001", disk.getPath());
 
         when(adapter.getPhysicalDisk(volumeId, pool)).thenReturn(disk);
 
@@ -38,6 +38,8 @@ public interface ScaleIOGatewayClient {
     String GATEWAY_API_PASSWORD = "powerflex.gw.password";
     String STORAGE_POOL_NAME = "powerflex.storagepool.name";
     String STORAGE_POOL_SYSTEM_ID = "powerflex.storagepool.system.id";
+    String SDC_ID = "powerflex.sdc.id";
+    String SDC_GUID = "powerflex.sdc.guid";
 
     static ScaleIOGatewayClient getClient(final String url, final String username, final String password,
             final boolean validateCertificate, final int timeout, final int maxConnections) throws NoSuchAlgorithmException, KeyManagementException, URISyntaxException {
@@ -81,8 +83,10 @@ public interface ScaleIOGatewayClient {
     // SDC APIs
     List<Sdc> listSdcs();
     Sdc getSdc(String sdcId);
+    String getSdcIdByGuid(String sdcGuid);
     Sdc getSdcByIp(String ipAddress);
     Sdc getConnectedSdcByIp(String ipAddress);
-    List<String> listConnectedSdcIps();
-    boolean isSdcConnected(String ipAddress);
+    boolean haveConnectedSdcs();
+    boolean isSdcConnected(String sdcId);
+    boolean isSdcConnectedByIP(String ipAddress);
 }
@@ -1013,6 +1013,24 @@ public class ScaleIOGatewayClientImpl implements ScaleIOGatewayClient {
         return get("/instances/Sdc::" + sdcId, Sdc.class);
     }
 
+    @Override
+    public String getSdcIdByGuid(String sdcGuid) {
+        Preconditions.checkArgument(StringUtils.isNotEmpty(sdcGuid), "SDC Guid cannot be null");
+
+        List<Sdc> sdcs = listSdcs();
+        if (sdcs == null) {
+            return null;
+        }
+
+        for (Sdc sdc : sdcs) {
+            if (sdcGuid.equalsIgnoreCase(sdc.getSdcGuid())) {
+                return sdc.getId();
+            }
+        }
+
+        return null;
+    }
+
     @Override
     public Sdc getSdcByIp(String ipAddress) {
         Preconditions.checkArgument(StringUtils.isNotEmpty(ipAddress), "IP address cannot be null");
@@ -1035,28 +1053,35 @@ public class ScaleIOGatewayClientImpl implements ScaleIOGatewayClient {
     }
 
     @Override
-    public List<String> listConnectedSdcIps() {
-        List<String> sdcIps = new ArrayList<>();
+    public boolean haveConnectedSdcs() {
         List<Sdc> sdcs = listSdcs();
         if(sdcs != null) {
             for (Sdc sdc : sdcs) {
                 if (MDM_CONNECTED_STATE.equalsIgnoreCase(sdc.getMdmConnectionState())) {
-                    sdcIps.add(sdc.getSdcIp());
+                    return true;
                 }
             }
         }
 
-        return sdcIps;
+        return false;
     }
 
     @Override
-    public boolean isSdcConnected(String ipAddress) {
+    public boolean isSdcConnected(String sdcId) {
+        Preconditions.checkArgument(StringUtils.isNotEmpty(sdcId), "SDC Id cannot be null");
+
+        Sdc sdc = getSdc(sdcId);
+        return (sdc != null && MDM_CONNECTED_STATE.equalsIgnoreCase(sdc.getMdmConnectionState()));
+    }
+
+    @Override
+    public boolean isSdcConnectedByIP(String ipAddress) {
         Preconditions.checkArgument(StringUtils.isNotEmpty(ipAddress), "IP address cannot be null");
 
         List<Sdc> sdcs = listSdcs();
-        if(sdcs != null) {
+        if (sdcs != null) {
             for (Sdc sdc : sdcs) {
-                if (ipAddress.equalsIgnoreCase(sdc.getSdcIp()) && MDM_CONNECTED_STATE.equalsIgnoreCase(sdc.getMdmConnectionState())) {
+                if (sdc != null && ipAddress.equalsIgnoreCase(sdc.getSdcIp()) && MDM_CONNECTED_STATE.equalsIgnoreCase(sdc.getMdmConnectionState())) {
                     return true;
                 }
             }
@@ -41,7 +41,6 @@ import org.apache.cloudstack.storage.RemoteHostEndPoint;
 import org.apache.cloudstack.storage.command.CommandResult;
 import org.apache.cloudstack.storage.command.CopyCommand;
 import org.apache.cloudstack.storage.command.CreateObjectAnswer;
-import org.apache.cloudstack.storage.datastore.api.Sdc;
 import org.apache.cloudstack.storage.datastore.api.StoragePoolStatistics;
 import org.apache.cloudstack.storage.datastore.api.VolumeStatistics;
 import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
@@ -72,11 +71,13 @@ import com.cloud.storage.SnapshotVO;
 import com.cloud.storage.Storage;
 import com.cloud.storage.StorageManager;
 import com.cloud.storage.StoragePool;
+import com.cloud.storage.StoragePoolHostVO;
 import com.cloud.storage.VMTemplateStoragePoolVO;
 import com.cloud.storage.Volume;
 import com.cloud.storage.VolumeDetailVO;
 import com.cloud.storage.VolumeVO;
 import com.cloud.storage.dao.SnapshotDao;
+import com.cloud.storage.dao.StoragePoolHostDao;
 import com.cloud.storage.dao.VMTemplatePoolDao;
 import com.cloud.storage.dao.VolumeDao;
 import com.cloud.storage.dao.VolumeDetailsDao;
@@ -96,6 +97,8 @@ public class ScaleIOPrimaryDataStoreDriver implements PrimaryDataStoreDriver {
     @Inject
     private StoragePoolDetailsDao storagePoolDetailsDao;
     @Inject
+    private StoragePoolHostDao storagePoolHostDao;
+    @Inject
     private VolumeDao volumeDao;
     @Inject
     private VolumeDetailsDao volumeDetailsDao;
@@ -144,38 +147,38 @@ public class ScaleIOPrimaryDataStoreDriver implements PrimaryDataStoreDriver {
                 iopsLimit = ScaleIOUtil.MINIMUM_ALLOWED_IOPS_LIMIT;
             }
 
-            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-            final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-            if (sdc == null) {
+            final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+            if (StringUtils.isBlank(sdcId)) {
                 alertHostSdcDisconnection(host);
                 throw new CloudRuntimeException("Unable to grant access to volume: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
             }
 
-            return client.mapVolumeToSdcWithLimits(ScaleIOUtil.getVolumePath(volume.getPath()), sdc.getId(), iopsLimit, bandwidthLimitInKbps);
+            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+            return client.mapVolumeToSdcWithLimits(ScaleIOUtil.getVolumePath(volume.getPath()), sdcId, iopsLimit, bandwidthLimitInKbps);
         } else if (DataObjectType.TEMPLATE.equals(dataObject.getType())) {
             final VMTemplateStoragePoolVO templatePoolRef = vmTemplatePoolDao.findByPoolTemplate(dataStore.getId(), dataObject.getId(), null);
             LOGGER.debug("Granting access for PowerFlex template volume: " + templatePoolRef.getInstallPath());
 
-            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-            final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-            if (sdc == null) {
+            final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+            if (StringUtils.isBlank(sdcId)) {
                 alertHostSdcDisconnection(host);
                 throw new CloudRuntimeException("Unable to grant access to template: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
             }
 
-            return client.mapVolumeToSdc(ScaleIOUtil.getVolumePath(templatePoolRef.getInstallPath()), sdc.getId());
+            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+            return client.mapVolumeToSdc(ScaleIOUtil.getVolumePath(templatePoolRef.getInstallPath()), sdcId);
         } else if (DataObjectType.SNAPSHOT.equals(dataObject.getType())) {
             SnapshotInfo snapshot = (SnapshotInfo) dataObject;
             LOGGER.debug("Granting access for PowerFlex volume snapshot: " + snapshot.getPath());
 
-            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-            final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-            if (sdc == null) {
+            final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+            if (StringUtils.isBlank(sdcId)) {
                 alertHostSdcDisconnection(host);
                 throw new CloudRuntimeException("Unable to grant access to snapshot: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
             }
 
-            return client.mapVolumeToSdc(ScaleIOUtil.getVolumePath(snapshot.getPath()), sdc.getId());
+            final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+            return client.mapVolumeToSdc(ScaleIOUtil.getVolumePath(snapshot.getPath()), sdcId);
         }
 
         return false;
@@ -191,41 +194,59 @@ public class ScaleIOPrimaryDataStoreDriver implements PrimaryDataStoreDriver {
                 final VolumeVO volume = volumeDao.findById(dataObject.getId());
                 LOGGER.debug("Revoking access for PowerFlex volume: " + volume.getPath());
 
-                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-                final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-                if (sdc == null) {
+                final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+                if (StringUtils.isBlank(sdcId)) {
                     throw new CloudRuntimeException("Unable to revoke access for volume: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
                 }
 
-                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(volume.getPath()), sdc.getId());
+                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(volume.getPath()), sdcId);
             } else if (DataObjectType.TEMPLATE.equals(dataObject.getType())) {
                 final VMTemplateStoragePoolVO templatePoolRef = vmTemplatePoolDao.findByPoolTemplate(dataStore.getId(), dataObject.getId(), null);
                 LOGGER.debug("Revoking access for PowerFlex template volume: " + templatePoolRef.getInstallPath());
 
-                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-                final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-                if (sdc == null) {
+                final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+                if (StringUtils.isBlank(sdcId)) {
                     throw new CloudRuntimeException("Unable to revoke access for template: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
                 }
 
-                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(templatePoolRef.getInstallPath()), sdc.getId());
+                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(templatePoolRef.getInstallPath()), sdcId);
             } else if (DataObjectType.SNAPSHOT.equals(dataObject.getType())) {
                 SnapshotInfo snapshot = (SnapshotInfo) dataObject;
                 LOGGER.debug("Revoking access for PowerFlex volume snapshot: " + snapshot.getPath());
 
-                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
-                final Sdc sdc = client.getConnectedSdcByIp(host.getPrivateIpAddress());
-                if (sdc == null) {
+                final String sdcId = getConnectedSdc(dataStore.getId(), host.getId());
+                if (StringUtils.isBlank(sdcId)) {
                     throw new CloudRuntimeException("Unable to revoke access for snapshot: " + dataObject.getId() + ", no Sdc connected with host ip: " + host.getPrivateIpAddress());
                 }
 
-                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(snapshot.getPath()), sdc.getId());
+                final ScaleIOGatewayClient client = getScaleIOClient(dataStore.getId());
+                client.unmapVolumeFromSdc(ScaleIOUtil.getVolumePath(snapshot.getPath()), sdcId);
             }
         } catch (Exception e) {
            LOGGER.warn("Failed to revoke access due to: " + e.getMessage(), e);
        }
    }
 
+    private String getConnectedSdc(long poolId, long hostId) {
+        try {
+            StoragePoolHostVO poolHostVO = storagePoolHostDao.findByPoolHost(poolId, hostId);
+            if (poolHostVO == null) {
+                return null;
+            }
+
+            final ScaleIOGatewayClient client = getScaleIOClient(poolId);
+            if (client.isSdcConnected(poolHostVO.getLocalPath())) {
+                return poolHostVO.getLocalPath();
+            }
+        } catch (Exception e) {
+            LOGGER.warn("Couldn't check SDC connection for the host: " + hostId + " and storage pool: " + poolId + " due to " + e.getMessage(), e);
+        }
+
+        return null;
+    }
+
     @Override
     public long getUsedBytes(StoragePool storagePool) {
         long usedSpaceBytes = 0;
@@ -930,8 +951,12 @@ public class ScaleIOPrimaryDataStoreDriver implements PrimaryDataStoreDriver {
         }
 
         try {
+            StoragePoolHostVO poolHostVO = storagePoolHostDao.findByPoolHost(pool.getId(), host.getId());
+            if (poolHostVO == null) {
+                return false;
+            }
             final ScaleIOGatewayClient client = getScaleIOClient(pool.getId());
-            return client.isSdcConnected(host.getPrivateIpAddress());
+            return client.isSdcConnected(poolHostVO.getLocalPath());
         } catch (Exception e) {
             LOGGER.warn("Unable to check the host: " + host.getId() + " access to storage pool: " + pool.getId() + " due to " + e.getMessage(), e);
             return false;
@@ -258,22 +258,9 @@ public class ScaleIOPrimaryDataStoreLifeCycle implements PrimaryDataStoreLifeCyc
             throw new CloudRuntimeException("Unsupported hypervisor type: " + cluster.getHypervisorType().toString());
         }
 
-        List<String> connectedSdcIps = null;
-        try {
-            ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(dataStore.getId(), storagePoolDetailsDao);
-            connectedSdcIps = client.listConnectedSdcIps();
-        } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
-            LOGGER.error("Failed to create storage pool", e);
-            throw new CloudRuntimeException("Failed to establish connection with PowerFlex Gateway to create storage pool");
-        }
-
-        if (connectedSdcIps == null || connectedSdcIps.isEmpty()) {
-            LOGGER.debug("No connected SDCs found for the PowerFlex storage pool");
-            throw new CloudRuntimeException("Failed to create storage pool as connected SDCs not found");
-        }
+        checkConnectedSdcs(dataStore.getId());
 
         PrimaryDataStoreInfo primaryDataStoreInfo = (PrimaryDataStoreInfo) dataStore;
 
         List<HostVO> hostsInCluster = resourceManager.listAllUpAndEnabledHosts(Host.Type.Routing, primaryDataStoreInfo.getClusterId(),
                 primaryDataStoreInfo.getPodId(), primaryDataStoreInfo.getDataCenterId());
         if (hostsInCluster.isEmpty()) {
@@ -285,8 +272,7 @@ public class ScaleIOPrimaryDataStoreLifeCycle implements PrimaryDataStoreLifeCyc
         List<HostVO> poolHosts = new ArrayList<HostVO>();
         for (HostVO host : hostsInCluster) {
             try {
-                if (connectedSdcIps.contains(host.getPrivateIpAddress())) {
-                    storageMgr.connectHostToSharedPool(host.getId(), primaryDataStoreInfo.getId());
+                if (storageMgr.connectHostToSharedPool(host.getId(), primaryDataStoreInfo.getId())) {
                     poolHosts.add(host);
                 }
             } catch (Exception e) {
@@ -315,27 +301,14 @@ public class ScaleIOPrimaryDataStoreLifeCycle implements PrimaryDataStoreLifeCyc
             throw new CloudRuntimeException("Unsupported hypervisor type: " + hypervisorType.toString());
         }
 
-        List<String> connectedSdcIps = null;
-        try {
-            ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(dataStore.getId(), storagePoolDetailsDao);
-            connectedSdcIps = client.listConnectedSdcIps();
-        } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
-            LOGGER.error("Failed to create storage pool", e);
-            throw new CloudRuntimeException("Failed to establish connection with PowerFlex Gateway to create storage pool");
-        }
-
-        if (connectedSdcIps == null || connectedSdcIps.isEmpty()) {
-            LOGGER.debug("No connected SDCs found for the PowerFlex storage pool");
-            throw new CloudRuntimeException("Failed to create storage pool as connected SDCs not found");
-        }
+        checkConnectedSdcs(dataStore.getId());
 
         LOGGER.debug("Attaching the pool to each of the hosts in the zone: " + scope.getScopeId());
         List<HostVO> hosts = resourceManager.listAllUpAndEnabledHostsInOneZoneByHypervisor(hypervisorType, scope.getScopeId());
         List<HostVO> poolHosts = new ArrayList<HostVO>();
         for (HostVO host : hosts) {
             try {
-                if (connectedSdcIps.contains(host.getPrivateIpAddress())) {
-                    storageMgr.connectHostToSharedPool(host.getId(), dataStore.getId());
+                if (storageMgr.connectHostToSharedPool(host.getId(), dataStore.getId())) {
                     poolHosts.add(host);
                 }
             } catch (Exception e) {
@@ -352,6 +325,22 @@ public class ScaleIOPrimaryDataStoreLifeCycle implements PrimaryDataStoreLifeCyc
         return true;
     }
 
+    private void checkConnectedSdcs(Long dataStoreId) {
+        boolean haveConnectedSdcs = false;
+        try {
+            ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(dataStoreId, storagePoolDetailsDao);
+            haveConnectedSdcs = client.haveConnectedSdcs();
+        } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
+            LOGGER.error(String.format("Failed to create storage pool for datastore: %s", dataStoreId), e);
+            throw new CloudRuntimeException(String.format("Failed to establish connection with PowerFlex Gateway to create storage pool for datastore: %s", dataStoreId));
+        }
+
+        if (!haveConnectedSdcs) {
+            LOGGER.debug(String.format("No connected SDCs found for the PowerFlex storage pool of datastore: %s", dataStoreId));
+            throw new CloudRuntimeException(String.format("Failed to create storage pool as connected SDCs not found for datastore: %s", dataStoreId));
+        }
+    }
+
     @Override
     public boolean maintain(DataStore store) {
         storagePoolAutomation.maintain(store);
@@ -21,6 +21,8 @@ package org.apache.cloudstack.storage.datastore.provider;
 import java.net.URISyntaxException;
 import java.security.KeyManagementException;
 import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
 
 import javax.inject.Inject;
 
@@ -30,6 +32,8 @@ import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClient;
 import org.apache.cloudstack.storage.datastore.client.ScaleIOGatewayClientConnectionPool;
 import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
 import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
+import org.apache.commons.collections.MapUtils;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Logger;
 
 import com.cloud.agent.AgentManager;
@@ -69,7 +73,41 @@ public class ScaleIOHostListener implements HypervisorHostListener {
             return false;
         }
 
-        if (!isHostSdcConnected(host.getPrivateIpAddress(), poolId)) {
+        StoragePool storagePool = (StoragePool)_dataStoreMgr.getDataStore(poolId, DataStoreRole.Primary);
+
+        String systemId = _storagePoolDetailsDao.findDetail(poolId, ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID).getValue();
+        if (systemId == null) {
+            throw new CloudRuntimeException("Failed to get the system id for PowerFlex storage pool " + storagePool.getName());
+        }
+        Map<String,String> details = new HashMap<>();
+        details.put(ScaleIOGatewayClient.STORAGE_POOL_SYSTEM_ID, systemId);
+
+        ModifyStoragePoolCommand cmd = new ModifyStoragePoolCommand(true, storagePool, storagePool.getPath(), details);
+        ModifyStoragePoolAnswer answer = sendModifyStoragePoolCommand(cmd, storagePool, hostId);
+        Map<String,String> poolDetails = answer.getPoolInfo().getDetails();
+        if (MapUtils.isEmpty(poolDetails)) {
+            String msg = "SDC details not found on the host: " + hostId + ", (re)install SDC and restart agent";
+            s_logger.warn(msg);
+            _alertMgr.sendAlert(AlertManager.AlertType.ALERT_TYPE_HOST, host.getDataCenterId(), host.getPodId(), "SDC not found on host: " + host.getUuid(), msg);
+            return false;
+        }
+
+        String sdcId = null;
+        if (poolDetails.containsKey(ScaleIOGatewayClient.SDC_ID)) {
+            sdcId = poolDetails.get(ScaleIOGatewayClient.SDC_ID);
+        } else if (poolDetails.containsKey(ScaleIOGatewayClient.SDC_GUID)) {
+            String sdcGuid = poolDetails.get(ScaleIOGatewayClient.SDC_GUID);
+            sdcId = getHostSdcId(sdcGuid, poolId);
+        }
+
+        if (StringUtils.isBlank(sdcId)) {
+            String msg = "Couldn't retrieve SDC details from the host: " + hostId + ", (re)install SDC and restart agent";
+            s_logger.warn(msg);
+            _alertMgr.sendAlert(AlertManager.AlertType.ALERT_TYPE_HOST, host.getDataCenterId(), host.getPodId(), "SDC details not found on host: " + host.getUuid(), msg);
+            return false;
+        }
+
+        if (!isHostSdcConnected(sdcId, poolId)) {
             s_logger.warn("SDC not connected on the host: " + hostId);
             String msg = "SDC not connected on the host: " + hostId + ", reconnect the SDC to MDM and restart agent";
             _alertMgr.sendAlert(AlertManager.AlertType.ALERT_TYPE_HOST, host.getDataCenterId(), host.getPodId(), "SDC disconnected on host: " + host.getUuid(), msg);
@@ -78,27 +116,39 @@ public class ScaleIOHostListener implements HypervisorHostListener {
 
         StoragePoolHostVO storagePoolHost = _storagePoolHostDao.findByPoolHost(poolId, hostId);
         if (storagePoolHost == null) {
-            storagePoolHost = new StoragePoolHostVO(poolId, hostId, "");
+            storagePoolHost = new StoragePoolHostVO(poolId, hostId, sdcId);
             _storagePoolHostDao.persist(storagePoolHost);
+        } else {
+            storagePoolHost.setLocalPath(sdcId);
+            _storagePoolHostDao.update(storagePoolHost.getId(), storagePoolHost);
         }
 
-        StoragePool storagePool = (StoragePool)_dataStoreMgr.getDataStore(poolId, DataStoreRole.Primary);
-        ModifyStoragePoolCommand cmd = new ModifyStoragePoolCommand(true, storagePool);
-        sendModifyStoragePoolCommand(cmd, storagePool, hostId);
+        s_logger.info("Connection established between storage pool: " + storagePool + " and host: " + hostId);
         return true;
     }
 
-    private boolean isHostSdcConnected(String hostIpAddress, long poolId) {
+    private String getHostSdcId(String sdcGuid, long poolId) {
         try {
+            s_logger.debug(String.format("Try to get host SDC Id for pool: %s, with SDC guid %s", poolId, sdcGuid));
             ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(poolId, _storagePoolDetailsDao);
+            return client.getSdcIdByGuid(sdcGuid);
+        } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
+            s_logger.error(String.format("Failed to get host SDC Id for pool: %s", poolId), e);
+            throw new CloudRuntimeException(String.format("Failed to establish connection with PowerFlex Gateway to get host SDC Id for pool: %s", poolId));
+        }
+    }
+
+    private boolean isHostSdcConnected(String sdcId, long poolId) {
+        try {
+            ScaleIOGatewayClient client = ScaleIOGatewayClientConnectionPool.getInstance().getClient(poolId, _storagePoolDetailsDao);
-            return client.isSdcConnected(hostIpAddress);
+            return client.isSdcConnected(sdcId);
         } catch (NoSuchAlgorithmException | KeyManagementException | URISyntaxException e) {
             s_logger.error("Failed to check host sdc connection", e);
             throw new CloudRuntimeException("Failed to establish connection with PowerFlex Gateway to check host sdc connection");
         }
     }
 
-    private void sendModifyStoragePoolCommand(ModifyStoragePoolCommand cmd, StoragePool storagePool, long hostId) {
+    private ModifyStoragePoolAnswer sendModifyStoragePoolCommand(ModifyStoragePoolCommand cmd, StoragePool storagePool, long hostId) {
         Answer answer = _agentMgr.easySend(hostId, cmd);
 
         if (answer == null) {
@@ -116,7 +166,7 @@ public class ScaleIOHostListener implements HypervisorHostListener {
 
         assert (answer instanceof ModifyStoragePoolAnswer) : "ModifyStoragePoolAnswer expected ; Pool = " + storagePool.getId() + " Host = " + hostId;
 
-        s_logger.info("Connection established between storage pool " + storagePool + " and host: " + hostId);
+        return (ModifyStoragePoolAnswer) answer;
     }
 
     @Override
@@ -19,6 +19,7 @@ package org.apache.cloudstack.storage.datastore.util;
 
 import org.apache.log4j.Logger;
 
+import com.cloud.utils.UuidUtils;
 import com.cloud.utils.script.Script;
 import org.apache.commons.lang3.StringUtils;
 
@@ -47,11 +48,31 @@ public class ScaleIOUtil {
     private static final String SDC_HOME_PATH = getSdcHomePath();
 
     private static final String RESCAN_CMD = "drv_cfg --rescan";
 
+    /**
+     * Cmd for querying volumes in SDC
+     * Sample output for cmd: drv_cfg --query_vols:
+     * Retrieved 2 volume(s)
+     * VOL-ID 6c33633100000009 MDM-ID 218ce1797566a00f
+     * VOL-ID 6c3362a30000000a MDM-ID 218ce1797566a00f
+     */
     private static final String QUERY_VOLUMES_CMD = "drv_cfg --query_vols";
-    // Sample output for cmd: drv_cfg --query_vols:
-    // Retrieved 2 volume(s)
-    // VOL-ID 6c33633100000009 MDM-ID 218ce1797566a00f
-    // VOL-ID 6c3362a30000000a MDM-ID 218ce1797566a00f
 
+    /**
+     * Cmd for querying guid in SDC
+     * Sample output for cmd: drv_cfg --query_guid:
+     * B0E3BFB8-C20B-43BF-93C8-13339E85AA50
+     */
+    private static final String QUERY_GUID_CMD = "drv_cfg --query_guid";
+
+    /**
+     * Cmd for querying MDMs in SDC
+     * Sample output for cmd: drv_cfg --query_mdms:
+     * Retrieved 2 mdm(s)
+     * MDM-ID 3ef46cbf2aaf5d0f SDC ID 6b18479c00000003 INSTALLATION ID 68ab55462cbb3ae4 IPs [0]-x.x.x.x [1]-x.x.x.x
+     * MDM-ID 2e706b2740ec200f SDC ID 301b852c00000003 INSTALLATION ID 33f8662e7a5c1e6c IPs [0]-x.x.x.x [1]-x.x.x.x
+     */
+    private static final String QUERY_MDMS_CMD = "drv_cfg --query_mdms";
+
     public static String getSdcHomePath() {
         String sdcHomePath = DEFAULT_SDC_HOME_PATH;
@@ -97,6 +118,51 @@ public class ScaleIOUtil {
         return result;
     }
 
+    public static String getSdcGuid() {
+        String queryGuidCmd = ScaleIOUtil.SDC_HOME_PATH + "/bin/" + ScaleIOUtil.QUERY_GUID_CMD;
+        String result = Script.runSimpleBashScript(queryGuidCmd);
+        if (result == null) {
+            LOGGER.warn("Failed to get SDC guid");
+            return null;
+        }
+
+        if (result.isEmpty()) {
+            LOGGER.warn("No SDC guid retrieved");
+            return null;
+        }
+
+        if (!UuidUtils.validateUUID(result)) {
+            LOGGER.warn("Invalid SDC guid: " + result);
+            return null;
+        }
+
+        return result;
+    }
+
+    public static String getSdcId(String mdmId) {
+        //query_mdms outputs "MDM-ID <System/MDM-Id> SDC ID <SDC-Id> INSTALLATION ID <Installation-Id> IPs [0]-x.x.x.x [1]-x.x.x.x" for a MDM with ID: <MDM-Id>
+        String queryMdmsCmd = ScaleIOUtil.SDC_HOME_PATH + "/bin/" + ScaleIOUtil.QUERY_MDMS_CMD;
+        queryMdmsCmd += "|grep " + mdmId + "|awk '{print $5}'";
+        String result = Script.runSimpleBashScript(queryMdmsCmd);
+        if (result == null) {
+            LOGGER.warn("Failed to get SDC Id, for the MDM: " + mdmId);
+            return null;
+        }
+
+        if (result.isEmpty()) {
+            LOGGER.warn("No SDC Id retrieved, for the MDM: " + mdmId);
+            return null;
+        }
+
+        String sdcIdRegEx = "^[0-9a-fA-F]{16}$";
+        if (!result.matches(sdcIdRegEx)) {
+            LOGGER.warn("Invalid SDC Id: " + result + " retrieved, for the MDM: " + mdmId);
+            return null;
+        }
+
+        return result;
+    }
+
     public static final String getVolumePath(String volumePathWithName) {
         if (StringUtils.isEmpty(volumePathWithName)) {
             return volumePathWithName;
@@ -22,7 +22,6 @@ package org.apache.cloudstack.storage.datastore.lifecycle;
 import static com.google.common.truth.Truth.assertThat;
 import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.ArgumentMatchers.anyLong;
-import static org.mockito.ArgumentMatchers.anyString;
 import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.times;
@@ -140,11 +139,7 @@ public class ScaleIOPrimaryDataStoreLifeCycleTest {
         ScaleIOGatewayClientImpl client = mock(ScaleIOGatewayClientImpl.class);
         when(ScaleIOGatewayClientConnectionPool.getInstance().getClient(1L, storagePoolDetailsDao)).thenReturn(client);
 
-        List<String> connectedSdcIps = new ArrayList<>();
-        connectedSdcIps.add("192.168.1.1");
-        connectedSdcIps.add("192.168.1.2");
-        when(client.listConnectedSdcIps()).thenReturn(connectedSdcIps);
-        when(client.isSdcConnected(anyString())).thenReturn(true);
+        when(client.haveConnectedSdcs()).thenReturn(true);
 
         final ZoneScope scope = new ZoneScope(1L);
 
@@ -57,7 +57,7 @@ public class StorPoolStorageAdaptor implements StorageAdaptor {
     private static final Map<String, KVMStoragePool> storageUuidToStoragePool = new HashMap<String, KVMStoragePool>();
 
     @Override
-    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType) {
+    public KVMStoragePool createStoragePool(String uuid, String host, int port, String path, String userInfo, StoragePoolType storagePoolType, Map<String, String> details) {
         SP_LOG("StorpooolStorageAdaptor.createStoragePool: uuid=%s, host=%s:%d, path=%s, userInfo=%s, type=%s", uuid, host, port, path, userInfo, storagePoolType);
 
         StorPoolStoragePool storagePool = new StorPoolStoragePool(uuid, host, port, storagePoolType, this);
 
@@ -161,4 +161,9 @@ public class StorPoolStoragePool implements KVMStoragePool {
     public boolean supportsConfigDriveIso() {
         return false;
     }
+
+    @Override
+    public Map<String, String> getDetails() {
+        return null;
+    }
 }
@@ -1100,14 +1100,14 @@ public class StorageManagerImpl extends ManagerBase implements StorageManager, C
     }
 
     @Override
-    public void connectHostToSharedPool(long hostId, long poolId) throws StorageUnavailableException, StorageConflictException {
+    public boolean connectHostToSharedPool(long hostId, long poolId) throws StorageUnavailableException, StorageConflictException {
         StoragePool pool = (StoragePool)_dataStoreMgr.getDataStore(poolId, DataStoreRole.Primary);
         assert (pool.isShared()) : "Now, did you actually read the name of this method?";
         s_logger.debug("Adding pool " + pool.getName() + " to host " + hostId);
 
         DataStoreProvider provider = _dataStoreProviderMgr.getDataStoreProvider(pool.getStorageProviderName());
         HypervisorHostListener listener = hostListeners.get(provider.getName());
-        listener.hostConnect(hostId, pool.getId());
+        return listener.hostConnect(hostId, pool.getId());
     }
 
     @Override
@@ -63,7 +63,7 @@ public class StoragePoolMonitorTest {
         Mockito.when(poolDao.listBy(nullable(Long.class), nullable(Long.class), nullable(Long.class), Mockito.any(ScopeType.class))).thenReturn(Collections.singletonList(pool));
         Mockito.when(poolDao.findZoneWideStoragePoolsByTags(Mockito.anyLong(), Mockito.any(String[].class))).thenReturn(Collections.<StoragePoolVO>emptyList());
         Mockito.when(poolDao.findZoneWideStoragePoolsByHypervisor(Mockito.anyLong(), Mockito.any(Hypervisor.HypervisorType.class))).thenReturn(Collections.<StoragePoolVO>emptyList());
-        Mockito.doNothing().when(storageManager).connectHostToSharedPool(host.getId(), pool.getId());
+        Mockito.doReturn(true).when(storageManager).connectHostToSharedPool(host.getId(), pool.getId());
 
         storagePoolMonitor.processConnect(host, cmd, false);