merge LTS branch 4.22 into main

Daan Hoogland 2025-11-26 11:55:50 +01:00
commit 9032fe3fb5
57 changed files with 816 additions and 504 deletions


@ -0,0 +1,24 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Configuration file for UEFI
guest.nvram.template.legacy=@GUESTNVRAMTEMPLATELEGACY@
guest.loader.legacy=@GUESTLOADERLEGACY@
guest.nvram.template.secure=@GUESTNVRAMTEMPLATESECURE@
guest.loader.secure=@GUESTLOADERSECURE@
guest.nvram.path=@GUESTNVRAMPATH@
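The @...@ tokens above are build-time substitution placeholders that are replaced with concrete paths when the agent package is built. A minimal sketch of reading the installed file with java.util.Properties; the path matches the packaging entries later in this commit, everything else is illustrative:

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.util.Properties;

    public class UefiPropertiesExample {
        public static void main(String[] args) throws IOException {
            Properties props = new Properties();
            // Location the agent package installs the file to (see the conffiles and postinst changes below).
            try (FileInputStream in = new FileInputStream("/etc/cloudstack/agent/uefi.properties")) {
                props.load(in);
            }
            // Key names come from the template above.
            System.out.println("NVRAM path: " + props.getProperty("guest.nvram.path"));
            System.out.println("Secure loader: " + props.getProperty("guest.loader.secure"));
        }
    }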


@ -53,7 +53,7 @@ public class ListPublicIpAddressesCmd extends BaseListRetrieveOnlyResourceCountC
@Parameter(name = ApiConstants.ALLOCATED_ONLY, type = CommandType.BOOLEAN, description = "limits search results to allocated public IP addresses") @Parameter(name = ApiConstants.ALLOCATED_ONLY, type = CommandType.BOOLEAN, description = "limits search results to allocated public IP addresses")
private Boolean allocatedOnly; private Boolean allocatedOnly;
- @Parameter(name = ApiConstants.STATE, type = CommandType.STRING, description = "lists all public IP addresses by state")
+ @Parameter(name = ApiConstants.STATE, type = CommandType.STRING, description = "lists all public IP addresses by state. A comma-separated list of states can be passed")
private String state; private String state;
@Parameter(name = ApiConstants.FOR_VIRTUAL_NETWORK, type = CommandType.BOOLEAN, description = "the virtual network for the IP address") @Parameter(name = ApiConstants.FOR_VIRTUAL_NETWORK, type = CommandType.BOOLEAN, description = "the virtual network for the IP address")
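The updated description for the state parameter allows a comma-separated list of states. A minimal sketch of how such a value could be split into individual states on the server side; the helper below is illustrative, not the actual CloudStack handler:

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class StateParameterExample {
        // Splits "Allocated, Free" into ["Allocated", "Free"], ignoring stray whitespace and empty entries.
        static List<String> parseStates(String state) {
            if (state == null || state.isBlank()) {
                return List.of();
            }
            return Arrays.stream(state.split(","))
                    .map(String::trim)
                    .filter(s -> !s.isEmpty())
                    .collect(Collectors.toList());
        }

        public static void main(String[] args) {
            System.out.println(parseStates("Allocated, Free"));
        }
    }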


@ -16,6 +16,7 @@
# under the License. # under the License.
/etc/cloudstack/agent/agent.properties /etc/cloudstack/agent/agent.properties
/etc/cloudstack/agent/uefi.properties
/etc/cloudstack/agent/environment.properties /etc/cloudstack/agent/environment.properties
/etc/cloudstack/agent/log4j-cloud.xml /etc/cloudstack/agent/log4j-cloud.xml
/etc/default/cloudstack-agent /etc/default/cloudstack-agent


@ -23,7 +23,7 @@ case "$1" in
configure) configure)
OLDCONFDIR="/etc/cloud/agent" OLDCONFDIR="/etc/cloud/agent"
NEWCONFDIR="/etc/cloudstack/agent" NEWCONFDIR="/etc/cloudstack/agent"
- CONFFILES="agent.properties log4j.xml log4j-cloud.xml"
+ CONFFILES="agent.properties uefi.properties log4j.xml log4j-cloud.xml"
mkdir -m 0755 -p /usr/share/cloudstack-agent/tmp mkdir -m 0755 -p /usr/share/cloudstack-agent/tmp

debian/control

@ -24,7 +24,7 @@ Description: CloudStack server library
Package: cloudstack-agent Package: cloudstack-agent
Architecture: all Architecture: all
- Depends: ${python:Depends}, ${python3:Depends}, openjdk-17-jre-headless | java17-runtime-headless | java17-runtime | zulu-17, cloudstack-common (= ${source:Version}), lsb-base (>= 9), openssh-client, qemu-kvm (>= 2.5) | qemu-system-x86 (>= 5.2), libvirt-bin (>= 1.3) | libvirt-daemon-system (>= 3.0), iproute2, ebtables, vlan, ipset, python3-libvirt, ethtool, iptables, cryptsetup, rng-tools, rsync, lsb-release, ufw, apparmor, cpu-checker, libvirt-daemon-driver-storage-rbd, sysstat
+ Depends: ${python:Depends}, ${python3:Depends}, openjdk-17-jre-headless | java17-runtime-headless | java17-runtime | zulu-17, cloudstack-common (= ${source:Version}), lsb-base (>= 9), openssh-client, qemu-kvm (>= 2.5) | qemu-system-x86 (>= 5.2), libvirt-bin (>= 1.3) | libvirt-daemon-system (>= 3.0), iproute2, ebtables, vlan, ipset, python3-libvirt, ethtool, iptables, cryptsetup, rng-tools, rsync, ovmf, swtpm, lsb-release, ufw, apparmor, cpu-checker, libvirt-daemon-driver-storage-rbd, sysstat
Recommends: init-system-helpers Recommends: init-system-helpers
Conflicts: cloud-agent, cloud-agent-libs, cloud-agent-deps, cloud-agent-scripts Conflicts: cloud-agent, cloud-agent-libs, cloud-agent-deps, cloud-agent-scripts
Description: CloudStack agent Description: CloudStack agent


@ -94,6 +94,14 @@ public class UsageEventUtils {
} }
public static void publishUsageEvent(String usageType, long accountId, long zoneId, long resourceId, String resourceName, Long offeringId, Long templateId,
Long size, String entityType, String entityUUID, Long vmId, boolean displayResource) {
if (displayResource) {
saveUsageEvent(usageType, accountId, zoneId, resourceId, offeringId, templateId, size, vmId, resourceName);
}
publishUsageEvent(usageType, accountId, zoneId, entityType, entityUUID);
}
public static void publishUsageEvent(String usageType, long accountId, long zoneId, long resourceId, String resourceName, Long offeringId, Long templateId, public static void publishUsageEvent(String usageType, long accountId, long zoneId, long resourceId, String resourceName, Long offeringId, Long templateId,
Long size, Long virtualSize, String entityType, String entityUUID, Map<String, String> details) { Long size, Long virtualSize, String entityType, String entityUUID, Map<String, String> details) {
saveUsageEvent(usageType, accountId, zoneId, resourceId, resourceName, offeringId, templateId, size, virtualSize, details); saveUsageEvent(usageType, accountId, zoneId, resourceId, resourceName, offeringId, templateId, size, virtualSize, details);
@ -202,6 +210,10 @@ public class UsageEventUtils {
s_usageEventDao.persist(new UsageEventVO(usageType, accountId, zoneId, vmId, securityGroupId)); s_usageEventDao.persist(new UsageEventVO(usageType, accountId, zoneId, vmId, securityGroupId));
} }
public static void saveUsageEvent(String usageType, long accountId, long zoneId, long resourceId, Long offeringId, Long templateId, Long size, Long vmId, String resourceName) {
s_usageEventDao.persist(new UsageEventVO(usageType, accountId, zoneId, resourceId, offeringId, templateId, size, vmId, resourceName));
}
private static void publishUsageEvent(String usageEventType, Long accountId, Long zoneId, String resourceType, String resourceUUID) { private static void publishUsageEvent(String usageEventType, Long accountId, Long zoneId, String resourceType, String resourceUUID) {
String configKey = "publish.usage.events"; String configKey = "publish.usage.events";
String value = s_configDao.getValue(configKey); String value = s_configDao.getValue(configKey);


@ -903,7 +903,7 @@ public class VolumeOrchestrator extends ManagerBase implements VolumeOrchestrati
// Save usage event and update resource count for user vm volumes // Save usage event and update resource count for user vm volumes
if (vm.getType() == VirtualMachine.Type.User) { if (vm.getType() == VirtualMachine.Type.User) {
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offering.getId(), null, size, UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offering.getId(), null, size,
- Volume.class.getName(), vol.getUuid(), vol.isDisplayVolume());
+ Volume.class.getName(), vol.getUuid(), vol.getInstanceId(), vol.isDisplayVolume());
_resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering); _resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering);
} }
DiskProfile diskProfile = toDiskProfile(vol, offering); DiskProfile diskProfile = toDiskProfile(vol, offering);
@ -981,7 +981,7 @@ public class VolumeOrchestrator extends ManagerBase implements VolumeOrchestrati
} }
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offeringId, vol.getTemplateId(), size, UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offeringId, vol.getTemplateId(), size,
- Volume.class.getName(), vol.getUuid(), vol.isDisplayVolume());
+ Volume.class.getName(), vol.getUuid(), vol.getInstanceId(), vol.isDisplayVolume());
_resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering); _resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering);
} }


@ -75,6 +75,9 @@ public class UsageEventVO implements UsageEvent {
@Column(name = "virtual_size") @Column(name = "virtual_size")
private Long virtualSize; private Long virtualSize;
@Column(name = "vm_id")
private Long vmId;
public UsageEventVO() { public UsageEventVO() {
} }
@ -143,6 +146,18 @@ public class UsageEventVO implements UsageEvent {
this.offeringId = securityGroupId; this.offeringId = securityGroupId;
} }
public UsageEventVO(String usageType, long accountId, long zoneId, long resourceId, Long offeringId, Long templateId, Long size, Long vmId, String resourceName) {
this.type = usageType;
this.accountId = accountId;
this.zoneId = zoneId;
this.resourceId = resourceId;
this.offeringId = offeringId;
this.templateId = templateId;
this.size = size;
this.vmId = vmId;
this.resourceName = resourceName;
}
@Override @Override
public long getId() { public long getId() {
return id; return id;
@ -248,4 +263,11 @@ public class UsageEventVO implements UsageEvent {
this.virtualSize = virtualSize; this.virtualSize = virtualSize;
} }
public Long getVmId() {
return vmId;
}
public void setVmId(Long vmId) {
this.vmId = vmId;
}
} }


@ -45,11 +45,11 @@ public class UsageEventDaoImpl extends GenericDaoBase<UsageEventVO, Long> implem
private final SearchBuilder<UsageEventVO> latestEventsSearch; private final SearchBuilder<UsageEventVO> latestEventsSearch;
private final SearchBuilder<UsageEventVO> IpeventsSearch; private final SearchBuilder<UsageEventVO> IpeventsSearch;
private static final String COPY_EVENTS = private static final String COPY_EVENTS =
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size) " "INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id) "
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size FROM cloud.usage_event vmevt WHERE vmevt.id > ? and vmevt.id <= ? "; + "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id FROM cloud.usage_event vmevt WHERE vmevt.id > ? and vmevt.id <= ? ";
private static final String COPY_ALL_EVENTS = private static final String COPY_ALL_EVENTS =
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size) " "INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id) "
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size FROM cloud.usage_event vmevt WHERE vmevt.id <= ?"; + "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id FROM cloud.usage_event vmevt WHERE vmevt.id <= ?";
private static final String COPY_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) " private static final String COPY_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) "
+ "SELECT id, usage_event_id, name, value FROM cloud.usage_event_details vmevtDetails WHERE vmevtDetails.usage_event_id > ? and vmevtDetails.usage_event_id <= ? "; + "SELECT id, usage_event_id, name, value FROM cloud.usage_event_details vmevtDetails WHERE vmevtDetails.usage_event_id > ? and vmevtDetails.usage_event_id <= ? ";
private static final String COPY_ALL_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) " private static final String COPY_ALL_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) "


@ -89,10 +89,11 @@ import com.cloud.upgrade.dao.Upgrade41900to41910;
import com.cloud.upgrade.dao.Upgrade41910to42000; import com.cloud.upgrade.dao.Upgrade41910to42000;
import com.cloud.upgrade.dao.Upgrade42000to42010; import com.cloud.upgrade.dao.Upgrade42000to42010;
import com.cloud.upgrade.dao.Upgrade42010to42100; import com.cloud.upgrade.dao.Upgrade42010to42100;
- import com.cloud.upgrade.dao.Upgrade420to421;
import com.cloud.upgrade.dao.Upgrade42100to42200;
+ import com.cloud.upgrade.dao.Upgrade42200to42210;
+ import com.cloud.upgrade.dao.Upgrade420to421;
import com.cloud.upgrade.dao.Upgrade421to430;
- import com.cloud.upgrade.dao.Upgrade42200to42300;
+ import com.cloud.upgrade.dao.Upgrade42210to42300;
import com.cloud.upgrade.dao.Upgrade430to440;
import com.cloud.upgrade.dao.Upgrade431to440; import com.cloud.upgrade.dao.Upgrade431to440;
import com.cloud.upgrade.dao.Upgrade432to440; import com.cloud.upgrade.dao.Upgrade432to440;
@ -237,7 +238,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
.next("4.20.0.0", new Upgrade42000to42010()) .next("4.20.0.0", new Upgrade42000to42010())
.next("4.20.1.0", new Upgrade42010to42100()) .next("4.20.1.0", new Upgrade42010to42100())
.next("4.21.0.0", new Upgrade42100to42200()) .next("4.21.0.0", new Upgrade42100to42200())
.next("4.22.0.0", new Upgrade42200to42300()) .next("4.22.0.0", new Upgrade42200to42210())
.next("4.22.1.0", new Upgrade42210to42300())
.build(); .build();
} }
@ -315,20 +317,20 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
} }
protected void executeProcedureScripts() { protected void executeProcedureScripts() {
LOGGER.info(String.format("Executing Stored Procedure scripts that are under resource directory [%s].", PROCEDURES_DIRECTORY)); LOGGER.info("Executing Stored Procedure scripts that are under resource directory [{}].", PROCEDURES_DIRECTORY);
List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(PROCEDURES_DIRECTORY); List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(PROCEDURES_DIRECTORY);
try (TransactionLegacy txn = TransactionLegacy.open("execute-procedure-scripts")) { try (TransactionLegacy txn = TransactionLegacy.open("execute-procedure-scripts")) {
Connection conn = txn.getConnection(); Connection conn = txn.getConnection();
for (String filePath : filesPathUnderViewsDirectory) { for (String filePath : filesPathUnderViewsDirectory) {
LOGGER.debug(String.format("Executing PROCEDURE script [%s].", filePath)); LOGGER.debug("Executing PROCEDURE script [{}].", filePath);
InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath); InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
runScript(conn, viewScript); runScript(conn, viewScript);
} }
LOGGER.info(String.format("Finished execution of PROCEDURE scripts that are under resource directory [%s].", PROCEDURES_DIRECTORY)); LOGGER.info("Finished execution of PROCEDURE scripts that are under resource directory [{}].", PROCEDURES_DIRECTORY);
} catch (SQLException e) { } catch (SQLException e) {
String message = String.format("Unable to execute PROCEDURE scripts due to [%s].", e.getMessage()); String message = String.format("Unable to execute PROCEDURE scripts due to [%s].", e.getMessage());
LOGGER.error(message, e); LOGGER.error(message, e);
@ -337,7 +339,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
} }
private DbUpgrade[] executeUpgrades(CloudStackVersion dbVersion, CloudStackVersion currentVersion) { private DbUpgrade[] executeUpgrades(CloudStackVersion dbVersion, CloudStackVersion currentVersion) {
LOGGER.info("Database upgrade must be performed from " + dbVersion + " to " + currentVersion); LOGGER.info("Database upgrade must be performed from {} to {}", dbVersion, currentVersion);
final DbUpgrade[] upgrades = calculateUpgradePath(dbVersion, currentVersion); final DbUpgrade[] upgrades = calculateUpgradePath(dbVersion, currentVersion);
@ -350,8 +352,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
private VersionVO executeUpgrade(DbUpgrade upgrade) { private VersionVO executeUpgrade(DbUpgrade upgrade) {
VersionVO version; VersionVO version;
LOGGER.debug("Running upgrade " + upgrade.getClass().getSimpleName() + " to upgrade from " + upgrade.getUpgradableVersionRange()[0] + "-" + upgrade LOGGER.debug("Running upgrade {} to upgrade from {}-{} to {}", upgrade.getClass().getSimpleName(), upgrade.getUpgradableVersionRange()[0], upgrade
.getUpgradableVersionRange()[1] + " to " + upgrade.getUpgradedVersion()); .getUpgradableVersionRange()[1], upgrade.getUpgradedVersion());
TransactionLegacy txn = TransactionLegacy.open("Upgrade"); TransactionLegacy txn = TransactionLegacy.open("Upgrade");
txn.start(); txn.start();
try { try {
@ -394,8 +396,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
// Run the corresponding '-cleanup.sql' script // Run the corresponding '-cleanup.sql' script
txn = TransactionLegacy.open("Cleanup"); txn = TransactionLegacy.open("Cleanup");
try { try {
LOGGER.info("Cleanup upgrade " + upgrade.getClass().getSimpleName() + " to upgrade from " + upgrade.getUpgradableVersionRange()[0] + "-" + upgrade LOGGER.info("Cleanup upgrade {} to upgrade from {}-{} to {}", upgrade.getClass().getSimpleName(), upgrade.getUpgradableVersionRange()[0], upgrade
.getUpgradableVersionRange()[1] + " to " + upgrade.getUpgradedVersion()); .getUpgradableVersionRange()[1], upgrade.getUpgradedVersion());
txn.start(); txn.start();
Connection conn; Connection conn;
@ -410,7 +412,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
if (scripts != null) { if (scripts != null) {
for (InputStream script : scripts) { for (InputStream script : scripts) {
runScript(conn, script); runScript(conn, script);
LOGGER.debug("Cleanup script " + upgrade.getClass().getSimpleName() + " is executed successfully"); LOGGER.debug("Cleanup script {} is executed successfully", upgrade.getClass().getSimpleName());
} }
} }
txn.commit(); txn.commit();
@ -420,27 +422,27 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
version.setUpdated(new Date()); version.setUpdated(new Date());
_dao.update(version.getId(), version); _dao.update(version.getId(), version);
txn.commit(); txn.commit();
LOGGER.debug("Upgrade completed for version " + version.getVersion()); LOGGER.debug("Upgrade completed for version {}", version.getVersion());
} finally { } finally {
txn.close(); txn.close();
} }
} }
protected void executeViewScripts() { protected void executeViewScripts() {
LOGGER.info(String.format("Executing VIEW scripts that are under resource directory [%s].", VIEWS_DIRECTORY)); LOGGER.info("Executing VIEW scripts that are under resource directory [{}].", VIEWS_DIRECTORY);
List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(VIEWS_DIRECTORY); List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(VIEWS_DIRECTORY);
try (TransactionLegacy txn = TransactionLegacy.open("execute-view-scripts")) { try (TransactionLegacy txn = TransactionLegacy.open("execute-view-scripts")) {
Connection conn = txn.getConnection(); Connection conn = txn.getConnection();
for (String filePath : filesPathUnderViewsDirectory) { for (String filePath : filesPathUnderViewsDirectory) {
LOGGER.debug(String.format("Executing VIEW script [%s].", filePath)); LOGGER.debug("Executing VIEW script [{}].", filePath);
InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath); InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
runScript(conn, viewScript); runScript(conn, viewScript);
} }
LOGGER.info(String.format("Finished execution of VIEW scripts that are under resource directory [%s].", VIEWS_DIRECTORY)); LOGGER.info("Finished execution of VIEW scripts that are under resource directory [{}].", VIEWS_DIRECTORY);
} catch (SQLException e) { } catch (SQLException e) {
String message = String.format("Unable to execute VIEW scripts due to [%s].", e.getMessage()); String message = String.format("Unable to execute VIEW scripts due to [%s].", e.getMessage());
LOGGER.error(message, e); LOGGER.error(message, e);
@ -470,10 +472,10 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
String csVersion = SystemVmTemplateRegistration.parseMetadataFile(); String csVersion = SystemVmTemplateRegistration.parseMetadataFile();
final CloudStackVersion sysVmVersion = CloudStackVersion.parse(csVersion); final CloudStackVersion sysVmVersion = CloudStackVersion.parse(csVersion);
final CloudStackVersion currentVersion = CloudStackVersion.parse(currentVersionValue); final CloudStackVersion currentVersion = CloudStackVersion.parse(currentVersionValue);
SystemVmTemplateRegistration.CS_MAJOR_VERSION = String.valueOf(sysVmVersion.getMajorRelease()) + "." + String.valueOf(sysVmVersion.getMinorRelease()); SystemVmTemplateRegistration.CS_MAJOR_VERSION = sysVmVersion.getMajorRelease() + "." + sysVmVersion.getMinorRelease();
SystemVmTemplateRegistration.CS_TINY_VERSION = String.valueOf(sysVmVersion.getPatchRelease()); SystemVmTemplateRegistration.CS_TINY_VERSION = String.valueOf(sysVmVersion.getPatchRelease());
LOGGER.info("DB version = " + dbVersion + " Code Version = " + currentVersion); LOGGER.info("DB version = {} Code Version = {}", dbVersion, currentVersion);
if (dbVersion.compareTo(currentVersion) > 0) { if (dbVersion.compareTo(currentVersion) > 0) {
throw new CloudRuntimeException("Database version " + dbVersion + " is higher than management software version " + currentVersionValue); throw new CloudRuntimeException("Database version " + dbVersion + " is higher than management software version " + currentVersionValue);
@ -522,7 +524,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
ResultSet result = pstmt.executeQuery()) { ResultSet result = pstmt.executeQuery()) {
if (result.next()) { if (result.next()) {
String init = result.getString(1); String init = result.getString(1);
LOGGER.info("init = " + DBEncryptionUtil.decrypt(init)); LOGGER.info("init = {}", DBEncryptionUtil.decrypt(init));
} }
} }
} }
@ -553,21 +555,11 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
return upgradedVersion; return upgradedVersion;
} }
@Override
public boolean supportsRollingUpgrade() {
return false;
}
@Override @Override
public InputStream[] getPrepareScripts() { public InputStream[] getPrepareScripts() {
return new InputStream[0]; return new InputStream[0];
} }
@Override
public void performDataMigration(Connection conn) {
}
@Override @Override
public InputStream[] getCleanupScripts() { public InputStream[] getCleanupScripts() {
return new InputStream[0]; return new InputStream[0];
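A recurring change in this file (and in the TransactionLegacy.java changes further down) is switching from String.format and string concatenation to Log4j2 parameterized messages. A minimal sketch of the before/after pattern, assuming log4j-api on the classpath; the directory value is just an illustrative string:

    import org.apache.logging.log4j.LogManager;
    import org.apache.logging.log4j.Logger;

    public class LoggingStyleExample {
        private static final Logger LOGGER = LogManager.getLogger(LoggingStyleExample.class);

        public static void main(String[] args) {
            String dir = "some/resource/directory";
            // Old style: the message string is built even when INFO is disabled.
            LOGGER.info(String.format("Executing VIEW scripts that are under resource directory [%s].", dir));
            // New style: the placeholder is only formatted if the message is actually logged.
            LOGGER.info("Executing VIEW scripts that are under resource directory [{}].", dir);
        }
    }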


@ -0,0 +1,30 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade.dao;
public class Upgrade42200to42210 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
@Override
public String[] getUpgradableVersionRange() {
return new String[] {"4.22.0.0", "4.22.1.0"};
}
@Override
public String getUpgradedVersion() {
return "4.22.1.0";
}
}


@ -16,11 +16,11 @@
// under the License. // under the License.
package com.cloud.upgrade.dao; package com.cloud.upgrade.dao;
- public class Upgrade42200to42300 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
+ public class Upgrade42210to42300 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
@Override @Override
public String[] getUpgradableVersionRange() { public String[] getUpgradableVersionRange() {
return new String[]{"4.22.0.0", "4.23.0.0"}; return new String[]{"4.22.1.0", "4.23.0.0"};
} }
@Override @Override


@ -59,6 +59,9 @@ public class UsageVolumeVO implements InternalIdentity {
@Column(name = "size") @Column(name = "size")
private long size; private long size;
@Column(name = "vm_id")
private Long vmId;
@Column(name = "created") @Column(name = "created")
@Temporal(value = TemporalType.TIMESTAMP) @Temporal(value = TemporalType.TIMESTAMP)
private Date created = null; private Date created = null;
@ -70,13 +73,14 @@ public class UsageVolumeVO implements InternalIdentity {
protected UsageVolumeVO() { protected UsageVolumeVO() {
} }
- public UsageVolumeVO(long id, long zoneId, long accountId, long domainId, Long diskOfferingId, Long templateId, long size, Date created, Date deleted) {
+ public UsageVolumeVO(long id, long zoneId, long accountId, long domainId, Long diskOfferingId, Long templateId, Long vmId, long size, Date created, Date deleted) {
this.volumeId = id; this.volumeId = id;
this.zoneId = zoneId; this.zoneId = zoneId;
this.accountId = accountId; this.accountId = accountId;
this.domainId = domainId; this.domainId = domainId;
this.diskOfferingId = diskOfferingId; this.diskOfferingId = diskOfferingId;
this.templateId = templateId; this.templateId = templateId;
this.vmId = vmId;
this.size = size; this.size = size;
this.created = created; this.created = created;
this.deleted = deleted; this.deleted = deleted;
@ -126,4 +130,12 @@ public class UsageVolumeVO implements InternalIdentity {
public long getVolumeId() { public long getVolumeId() {
return volumeId; return volumeId;
} }
public Long getVmId() {
return vmId;
}
public void setVmId(Long vmId) {
this.vmId = vmId;
}
} }


@ -57,6 +57,7 @@ public class UsageStorageDaoImpl extends GenericDaoBase<UsageStorageVO, Long> im
IdSearch.and("accountId", IdSearch.entity().getAccountId(), SearchCriteria.Op.EQ); IdSearch.and("accountId", IdSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
IdSearch.and("id", IdSearch.entity().getEntityId(), SearchCriteria.Op.EQ); IdSearch.and("id", IdSearch.entity().getEntityId(), SearchCriteria.Op.EQ);
IdSearch.and("type", IdSearch.entity().getStorageType(), SearchCriteria.Op.EQ); IdSearch.and("type", IdSearch.entity().getStorageType(), SearchCriteria.Op.EQ);
IdSearch.and("deleted", IdSearch.entity().getDeleted(), SearchCriteria.Op.NULL);
IdSearch.done(); IdSearch.done();
IdZoneSearch = createSearchBuilder(); IdZoneSearch = createSearchBuilder();
@ -74,6 +75,7 @@ public class UsageStorageDaoImpl extends GenericDaoBase<UsageStorageVO, Long> im
sc.setParameters("accountId", accountId); sc.setParameters("accountId", accountId);
sc.setParameters("id", id); sc.setParameters("id", id);
sc.setParameters("type", type); sc.setParameters("type", type);
sc.setParameters("deleted", null);
return listBy(sc, null); return listBy(sc, null);
} }


@ -23,9 +23,7 @@ import com.cloud.usage.UsageVolumeVO;
import com.cloud.utils.db.GenericDao; import com.cloud.utils.db.GenericDao;
public interface UsageVolumeDao extends GenericDao<UsageVolumeVO, Long> { public interface UsageVolumeDao extends GenericDao<UsageVolumeVO, Long> {
public void removeBy(long userId, long id);
public void update(UsageVolumeVO usage);
public List<UsageVolumeVO> getUsageRecords(Long accountId, Long domainId, Date startDate, Date endDate, boolean limit, int page); public List<UsageVolumeVO> getUsageRecords(Long accountId, Long domainId, Date startDate, Date endDate, boolean limit, int page);
List<UsageVolumeVO> listByVolumeId(long volumeId, long accountId);
} }


@ -18,81 +18,46 @@ package com.cloud.usage.dao;
import java.sql.PreparedStatement; import java.sql.PreparedStatement;
import java.sql.ResultSet; import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Date; import java.util.Date;
import java.util.List; import java.util.List;
import java.util.TimeZone; import java.util.TimeZone;
- import com.cloud.exception.CloudException;
+ import javax.annotation.PostConstruct;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import com.cloud.usage.UsageVolumeVO; import com.cloud.usage.UsageVolumeVO;
import com.cloud.utils.DateUtil; import com.cloud.utils.DateUtil;
import com.cloud.utils.db.GenericDaoBase; import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy; import com.cloud.utils.db.TransactionLegacy;
@Component @Component
public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> implements UsageVolumeDao { public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> implements UsageVolumeDao {
- protected static final String REMOVE_BY_USERID_VOLID = "DELETE FROM usage_volume WHERE account_id = ? AND volume_id = ?";
- protected static final String UPDATE_DELETED = "UPDATE usage_volume SET deleted = ? WHERE account_id = ? AND volume_id = ? and deleted IS NULL";
- protected static final String GET_USAGE_RECORDS_BY_ACCOUNT = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
+ protected static final String GET_USAGE_RECORDS_BY_ACCOUNT = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
+ "FROM usage_volume " + "WHERE account_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR " + "FROM usage_volume " + "WHERE account_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR "
+ " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))"; + " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))";
- protected static final String GET_USAGE_RECORDS_BY_DOMAIN = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
+ protected static final String GET_USAGE_RECORDS_BY_DOMAIN = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
+ "FROM usage_volume " + "WHERE domain_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR " + "FROM usage_volume " + "WHERE domain_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR "
+ " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))"; + " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))";
- protected static final String GET_ALL_USAGE_RECORDS = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
+ protected static final String GET_ALL_USAGE_RECORDS = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
+ "FROM usage_volume " + "WHERE (deleted IS NULL) OR (created BETWEEN ? AND ?) OR " + " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?))"; + "FROM usage_volume " + "WHERE (deleted IS NULL) OR (created BETWEEN ? AND ?) OR " + " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?))";
private SearchBuilder<UsageVolumeVO> volumeSearch;
public UsageVolumeDaoImpl() { public UsageVolumeDaoImpl() {
} }
- @Override
- public void removeBy(long accountId, long volId) {
-     TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
-     try {
-         txn.start();
-         try(PreparedStatement pstmt = txn.prepareStatement(REMOVE_BY_USERID_VOLID);) {
-             if (pstmt != null) {
-                 pstmt.setLong(1, accountId);
-                 pstmt.setLong(2, volId);
-                 pstmt.executeUpdate();
-             }
-         }catch (SQLException e) {
-             throw new CloudException("Error removing usageVolumeVO:"+e.getMessage(), e);
-         }
-         txn.commit();
-     } catch (Exception e) {
-         txn.rollback();
-         logger.warn("Error removing usageVolumeVO:"+e.getMessage(), e);
-     } finally {
-         txn.close();
-     }
- }
- @Override
- public void update(UsageVolumeVO usage) {
-     TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
-     PreparedStatement pstmt = null;
-     try {
-         txn.start();
-         if (usage.getDeleted() != null) {
-             pstmt = txn.prepareAutoCloseStatement(UPDATE_DELETED);
-             pstmt.setString(1, DateUtil.getDateDisplayString(TimeZone.getTimeZone("GMT"), usage.getDeleted()));
-             pstmt.setLong(2, usage.getAccountId());
-             pstmt.setLong(3, usage.getVolumeId());
-             pstmt.executeUpdate();
-         }
-         txn.commit();
-     } catch (Exception e) {
-         txn.rollback();
-         logger.warn("Error updating UsageVolumeVO", e);
-     } finally {
-         txn.close();
-     }
- }
+ @PostConstruct
+ protected void init() {
+     volumeSearch = createSearchBuilder();
+     volumeSearch.and("accountId", volumeSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
+     volumeSearch.and("volumeId", volumeSearch.entity().getVolumeId(), SearchCriteria.Op.EQ);
+     volumeSearch.and("deleted", volumeSearch.entity().getDeleted(), SearchCriteria.Op.NULL);
+     volumeSearch.done();
+ }
@Override @Override
@ -150,11 +115,15 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
if (tId == 0) { if (tId == 0) {
tId = null; tId = null;
} }
- long size = Long.valueOf(rs.getLong(7));
+ Long vmId = Long.valueOf(rs.getLong(7));
+ if (vmId == 0) {
+     vmId = null;
+ }
+ long size = Long.valueOf(rs.getLong(8));
Date createdDate = null;
Date deletedDate = null;
- String createdTS = rs.getString(8);
- String deletedTS = rs.getString(9);
+ String createdTS = rs.getString(9);
+ String deletedTS = rs.getString(10);
if (createdTS != null) { if (createdTS != null) {
createdDate = DateUtil.parseDateString(s_gmtTimeZone, createdTS); createdDate = DateUtil.parseDateString(s_gmtTimeZone, createdTS);
@ -163,7 +132,7 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
deletedDate = DateUtil.parseDateString(s_gmtTimeZone, deletedTS); deletedDate = DateUtil.parseDateString(s_gmtTimeZone, deletedTS);
} }
- usageRecords.add(new UsageVolumeVO(vId, zoneId, acctId, dId, doId, tId, size, createdDate, deletedDate));
+ usageRecords.add(new UsageVolumeVO(vId, zoneId, acctId, dId, doId, tId, vmId, size, createdDate, deletedDate));
} }
} catch (Exception e) { } catch (Exception e) {
txn.rollback(); txn.rollback();
@ -174,4 +143,13 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
return usageRecords; return usageRecords;
} }
@Override
public List<UsageVolumeVO> listByVolumeId(long volumeId, long accountId) {
SearchCriteria<UsageVolumeVO> sc = volumeSearch.create();
sc.setParameters("accountId", accountId);
sc.setParameters("volumeId", volumeId);
sc.setParameters("deleted", null);
return listBy(sc);
}
} }
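With removeBy and update dropped from UsageVolumeDao, callers are left with the generic DAO operations plus the new listByVolumeId lookup, and volume usage rows now carry the owning VM. A minimal sketch of how a caller might create and look up such rows; the class, injection and method names below are assumptions for illustration, only the UsageVolumeVO constructor and listByVolumeId come from this diff:

    import java.util.Date;
    import java.util.List;

    import javax.inject.Inject;

    import com.cloud.usage.UsageVolumeVO;
    import com.cloud.usage.dao.UsageVolumeDao;

    public class VolumeUsageExample {
        @Inject
        private UsageVolumeDao usageVolumeDao;

        public void recordVolumeCreate(long volumeId, long zoneId, long accountId, long domainId,
                Long diskOfferingId, Long templateId, Long vmId, long size) {
            // The new constructor records which VM the volume belongs to (vm_id column added by this upgrade).
            UsageVolumeVO usage = new UsageVolumeVO(volumeId, zoneId, accountId, domainId,
                    diskOfferingId, templateId, vmId, size, new Date(), null);
            usageVolumeDao.persist(usage);
        }

        public List<UsageVolumeVO> openRecordsFor(long volumeId, long accountId) {
            // Returns only rows whose deleted column is NULL, per the SearchBuilder set up in init() above.
            return usageVolumeDao.listByVolumeId(volumeId, accountId);
        }
    }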


@ -101,7 +101,7 @@ public class UserVmDaoImpl extends GenericDaoBase<UserVmVO, Long> implements Use
ReservationDao reservationDao; ReservationDao reservationDao;
private static final String LIST_PODS_HAVING_VMS_FOR_ACCOUNT = private static final String LIST_PODS_HAVING_VMS_FOR_ACCOUNT =
"SELECT pod_id FROM cloud.vm_instance WHERE data_center_id = ? AND account_id = ? AND pod_id IS NOT NULL AND (state = 'Running' OR state = 'Stopped') " "SELECT pod_id FROM cloud.vm_instance WHERE data_center_id = ? AND account_id = ? AND pod_id IS NOT NULL AND state IN ('Starting', 'Running', 'Stopped') "
+ "GROUP BY pod_id HAVING count(id) > 0 ORDER BY count(id) DESC"; + "GROUP BY pod_id HAVING count(id) > 0 ORDER BY count(id) DESC";
private static final String VM_DETAILS = "select vm_instance.id, " private static final String VM_DETAILS = "select vm_instance.id, "


@ -0,0 +1,20 @@
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--;
-- Schema upgrade cleanup from 4.22.0.0 to 4.22.1.0
--;


@ -0,0 +1,27 @@
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.
--;
-- Schema upgrade from 4.22.0.0 to 4.22.1.0
--;
-- Add vm_id column to usage_event table for volume usage events
CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.usage_event','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with volume usage events"');
CALL `cloud_usage`.`IDEMPOTENT_ADD_COLUMN`('cloud_usage.usage_event','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with volume usage events"');
-- Add vm_id column to cloud_usage.usage_volume table
CALL `cloud_usage`.`IDEMPOTENT_ADD_COLUMN`('cloud_usage.usage_volume','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with the volume usage"');


@ -25,10 +25,7 @@ import java.sql.ResultSet;
import java.sql.SQLException; import java.sql.SQLException;
import java.sql.Savepoint; import java.sql.Savepoint;
import java.sql.Statement; import java.sql.Statement;
import java.util.Iterator; import java.util.*;
import java.util.LinkedList;
import java.util.List;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicLong; import java.util.concurrent.atomic.AtomicLong;
import javax.sql.DataSource; import javax.sql.DataSource;
@ -56,14 +53,12 @@ import com.zaxxer.hikari.HikariDataSource;
/** /**
* Transaction abstracts away the Connection object in JDBC. It allows the * Transaction abstracts away the Connection object in JDBC. It allows the
* following things that the Connection object does not. * following things that the Connection object does not.
*
* 1. Transaction can be started at an entry point and whether the DB * 1. Transaction can be started at an entry point and whether the DB
* actions should be auto-commit or not determined at that point. * actions should be auto-commit or not determined at that point.
* 2. DB Connection is allocated only when it is needed. * 2. DB Connection is allocated only when it is needed.
* 3. Code does not need to know if a transaction has been started or not. * 3. Code does not need to know if a transaction has been started or not.
* It just starts/ends a transaction and we resolve it correctly with * It just starts/ends a transaction and we resolve it correctly with
* the previous actions. * the previous actions.
*
* Note that this class is not synchronous but it doesn't need to be because * Note that this class is not synchronous but it doesn't need to be because
* it is stored with TLS and is one per thread. Use appropriately. * it is stored with TLS and is one per thread. Use appropriately.
*/ */
@ -73,7 +68,7 @@ public class TransactionLegacy implements Closeable {
protected Logger lockLogger = LogManager.getLogger(Transaction.class.getName() + "." + "Lock"); protected Logger lockLogger = LogManager.getLogger(Transaction.class.getName() + "." + "Lock");
protected static Logger CONN_LOGGER = LogManager.getLogger(Transaction.class.getName() + "." + "Connection"); protected static Logger CONN_LOGGER = LogManager.getLogger(Transaction.class.getName() + "." + "Connection");
private static final ThreadLocal<TransactionLegacy> tls = new ThreadLocal<TransactionLegacy>(); private static final ThreadLocal<TransactionLegacy> tls = new ThreadLocal<>();
private static final String START_TXN = "start_txn"; private static final String START_TXN = "start_txn";
private static final String CURRENT_TXN = "current_txn"; private static final String CURRENT_TXN = "current_txn";
private static final String CREATE_TXN = "create_txn"; private static final String CREATE_TXN = "create_txn";
@ -103,7 +98,7 @@ public class TransactionLegacy implements Closeable {
private final LinkedList<StackElement> _stack; private final LinkedList<StackElement> _stack;
private long _id; private long _id;
private final LinkedList<Pair<String, Long>> _lockTimes = new LinkedList<Pair<String, Long>>(); private final LinkedList<Pair<String, Long>> _lockTimes = new LinkedList<>();
private String _name; private String _name;
private Connection _conn; private Connection _conn;
@ -160,7 +155,7 @@ public class TransactionLegacy implements Closeable {
TransactionLegacy txn = tls.get(); TransactionLegacy txn = tls.get();
if (txn == null) { if (txn == null) {
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Creating the transaction: " + name); LOGGER.trace("Creating the transaction: {}", name);
} }
txn = new TransactionLegacy(name, false, databaseId); txn = new TransactionLegacy(name, false, databaseId);
tls.set(txn); tls.set(txn);
@ -206,7 +201,7 @@ public class TransactionLegacy implements Closeable {
public void registerLock(String sql) { public void registerLock(String sql) {
if (_txn && lockLogger.isDebugEnabled()) { if (_txn && lockLogger.isDebugEnabled()) {
Pair<String, Long> time = new Pair<String, Long>(sql, System.currentTimeMillis()); Pair<String, Long> time = new Pair<>(sql, System.currentTimeMillis());
_lockTimes.add(time); _lockTimes.add(time);
} }
} }
@ -218,7 +213,7 @@ public class TransactionLegacy implements Closeable {
public static Connection getStandaloneConnectionWithException() throws SQLException { public static Connection getStandaloneConnectionWithException() throws SQLException {
Connection conn = s_ds.getConnection(); Connection conn = s_ds.getConnection();
if (CONN_LOGGER.isTraceEnabled()) { if (CONN_LOGGER.isTraceEnabled()) {
CONN_LOGGER.trace("Retrieving a standalone connection: dbconn" + System.identityHashCode(conn)); CONN_LOGGER.trace("Retrieving a standalone connection: dbconn{}", System.identityHashCode(conn));
} }
return conn; return conn;
} }
@ -236,7 +231,7 @@ public class TransactionLegacy implements Closeable {
try { try {
Connection conn = s_usageDS.getConnection(); Connection conn = s_usageDS.getConnection();
if (CONN_LOGGER.isTraceEnabled()) { if (CONN_LOGGER.isTraceEnabled()) {
CONN_LOGGER.trace("Retrieving a standalone connection for usage: dbconn" + System.identityHashCode(conn)); CONN_LOGGER.trace("Retrieving a standalone connection for usage: dbconn{}", System.identityHashCode(conn));
} }
return conn; return conn;
} catch (SQLException e) { } catch (SQLException e) {
@ -249,7 +244,7 @@ public class TransactionLegacy implements Closeable {
try { try {
Connection conn = s_simulatorDS.getConnection(); Connection conn = s_simulatorDS.getConnection();
if (CONN_LOGGER.isTraceEnabled()) { if (CONN_LOGGER.isTraceEnabled()) {
CONN_LOGGER.trace("Retrieving a standalone connection for simulator: dbconn" + System.identityHashCode(conn)); CONN_LOGGER.trace("Retrieving a standalone connection for simulator: dbconn{}", System.identityHashCode(conn));
} }
return conn; return conn;
} catch (SQLException e) { } catch (SQLException e) {
@ -266,7 +261,7 @@ public class TransactionLegacy implements Closeable {
Iterator<StackElement> it = _stack.descendingIterator(); Iterator<StackElement> it = _stack.descendingIterator();
while (it.hasNext()) { while (it.hasNext()) {
StackElement element = it.next(); StackElement element = it.next();
if (element.type == ATTACHMENT) { if (Objects.equals(element.type, ATTACHMENT)) {
TransactionAttachment att = (TransactionAttachment)element.ref; TransactionAttachment att = (TransactionAttachment)element.ref;
if (name.equals(att.getName())) { if (name.equals(att.getName())) {
it.remove(); it.remove();
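Throughout this file, == comparisons on the String type markers (ATTACHMENT, CURRENT_TXN, START_TXN, and so on) are replaced with Objects.equals, which compares values and tolerates null. A minimal sketch of the difference; the constant value and the lookup string are illustrative:

    import java.util.Objects;

    public class TypeMarkerExample {
        private static final String ATTACHMENT = "attachment";

        public static void main(String[] args) {
            // An equal but distinct String instance fails a reference comparison.
            String type = new String("attachment");
            System.out.println(type == ATTACHMENT);               // false
            System.out.println(Objects.equals(type, ATTACHMENT)); // true, and null-safe
        }
    }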
@ -308,7 +303,7 @@ public class TransactionLegacy implements Closeable {
} }
// relax stack structure for several places that @DB required injection is not in place // relax stack structure for several places that @DB required injection is not in place
LOGGER.warn("Non-standard stack context that Transaction context is manaully placed into the calling chain. Stack chain: " + sb); LOGGER.warn("Non-standard stack context that Transaction context is manaully placed into the calling chain. Stack chain: {}", sb);
return true; return true;
} }
@ -344,7 +339,7 @@ public class TransactionLegacy implements Closeable {
private TransactionLegacy(final String name, final boolean forLocking, final short databaseId) { private TransactionLegacy(final String name, final boolean forLocking, final short databaseId) {
_name = name; _name = name;
_conn = null; _conn = null;
_stack = new LinkedList<StackElement>(); _stack = new LinkedList<>();
_txn = false; _txn = false;
_dbId = databaseId; _dbId = databaseId;
_id = s_id.incrementAndGet(); _id = s_id.incrementAndGet();
@ -372,7 +367,7 @@ public class TransactionLegacy implements Closeable {
final StringBuilder str = new StringBuilder((_name != null ? _name : "")); final StringBuilder str = new StringBuilder((_name != null ? _name : ""));
str.append(" : "); str.append(" : ");
for (final StackElement se : _stack) { for (final StackElement se : _stack) {
if (se.type == CURRENT_TXN) { if (Objects.equals(se.type, CURRENT_TXN)) {
str.append(se.ref).append(", "); str.append(se.ref).append(", ");
} }
} }
@ -406,7 +401,7 @@ public class TransactionLegacy implements Closeable {
@Deprecated @Deprecated
public void start() { public void start() {
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("txn: start requested by: " + buildName()); LOGGER.trace("txn: start requested by: {}", buildName());
} }
_stack.push(new StackElement(START_TXN, null)); _stack.push(new StackElement(START_TXN, null));
@ -434,7 +429,7 @@ public class TransactionLegacy implements Closeable {
if (_stmt != null) { if (_stmt != null) {
try { try {
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Closing: " + _stmt.toString()); stmtLogger.trace("Closing: {}", _stmt.toString());
} }
try { try {
ResultSet rs = _stmt.getResultSet(); ResultSet rs = _stmt.getResultSet();
@ -446,7 +441,7 @@ public class TransactionLegacy implements Closeable {
} }
_stmt.close(); _stmt.close();
} catch (final SQLException e) { } catch (final SQLException e) {
stmtLogger.trace("Unable to close statement: " + _stmt.toString()); stmtLogger.trace("Unable to close statement: {}", _stmt.toString());
} finally { } finally {
_stmt = null; _stmt = null;
} }
@ -474,7 +469,7 @@ public class TransactionLegacy implements Closeable {
final Connection conn = getConnection(); final Connection conn = getConnection();
final PreparedStatement pstmt = conn.prepareStatement(sql); final PreparedStatement pstmt = conn.prepareStatement(sql);
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Preparing: " + sql); stmtLogger.trace("Preparing: {}", sql);
} }
return pstmt; return pstmt;
} }
@ -494,7 +489,7 @@ public class TransactionLegacy implements Closeable {
final Connection conn = getConnection(); final Connection conn = getConnection();
final PreparedStatement pstmt = conn.prepareStatement(sql, autoGeneratedKeys); final PreparedStatement pstmt = conn.prepareStatement(sql, autoGeneratedKeys);
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Preparing: " + sql); stmtLogger.trace("Preparing: {}", sql);
} }
closePreviousStatement(); closePreviousStatement();
_stmt = pstmt; _stmt = pstmt;
@ -516,7 +511,7 @@ public class TransactionLegacy implements Closeable {
final Connection conn = getConnection(); final Connection conn = getConnection();
final PreparedStatement pstmt = conn.prepareStatement(sql, columnNames); final PreparedStatement pstmt = conn.prepareStatement(sql, columnNames);
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Preparing: " + sql); stmtLogger.trace("Preparing: {}", sql);
} }
closePreviousStatement(); closePreviousStatement();
_stmt = pstmt; _stmt = pstmt;
@ -537,7 +532,7 @@ public class TransactionLegacy implements Closeable {
final Connection conn = getConnection(); final Connection conn = getConnection();
final PreparedStatement pstmt = conn.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability); final PreparedStatement pstmt = conn.prepareStatement(sql, resultSetType, resultSetConcurrency, resultSetHoldability);
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Preparing: " + sql); stmtLogger.trace("Preparing: {}", sql);
} }
closePreviousStatement(); closePreviousStatement();
_stmt = pstmt; _stmt = pstmt;
@ -546,7 +541,6 @@ public class TransactionLegacy implements Closeable {
/** /**
* Returns the db connection. * Returns the db connection.
*
* Note: that you can call getConnection() but beaware that * Note: that you can call getConnection() but beaware that
* all prepare statements from the Connection are not garbage * all prepare statements from the Connection are not garbage
* collected! * collected!
@ -595,8 +589,7 @@ public class TransactionLegacy implements Closeable {
// //
_stack.push(new StackElement(CREATE_CONN, null)); _stack.push(new StackElement(CREATE_CONN, null));
if (CONN_LOGGER.isTraceEnabled()) { if (CONN_LOGGER.isTraceEnabled()) {
CONN_LOGGER.trace("Creating a DB connection with " + (_txn ? " txn: " : " no txn: ") + " for " + _dbId + ": dbconn" + System.identityHashCode(_conn) + CONN_LOGGER.trace("Creating a DB connection with {} for {}: dbconn{}. Stack: {}", _txn ? " txn: " : " no txn: ", _dbId, System.identityHashCode(_conn), buildName());
". Stack: " + buildName());
} }
} else { } else {
LOGGER.trace("conn: Using existing DB connection"); LOGGER.trace("conn: Using existing DB connection");
@ -615,33 +608,33 @@ public class TransactionLegacy implements Closeable {
} }
protected boolean takeOver(final String name, final boolean create) { protected boolean takeOver(final String name, final boolean create) {
if (_stack.size() != 0) { if (!_stack.isEmpty()) {
if (!create) { if (!create) {
// If it is not a create transaction, then let's just use the current one. // If it is not a create transaction, then let's just use the current one.
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Using current transaction: " + toString()); LOGGER.trace("Using current transaction: {}", this);
} }
mark(name); mark(name);
return false; return false;
} }
final StackElement se = _stack.getFirst(); final StackElement se = _stack.getFirst();
if (se.type == CREATE_TXN) { if (Objects.equals(se.type, CREATE_TXN)) {
// This create is called inside of another create. Which is ok? // This create is called inside of another create. Which is ok?
// We will let that create be responsible for cleaning up. // We will let that create be responsible for cleaning up.
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Create using current transaction: " + toString()); LOGGER.trace("Create using current transaction: {}", this);
} }
mark(name); mark(name);
return false; return false;
} }
LOGGER.warn("Encountered a transaction that has leaked. Cleaning up. " + toString()); LOGGER.warn("Encountered a transaction that has leaked. Cleaning up. {}", this);
cleanup(); cleanup();
} }
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Took over the transaction: " + name); LOGGER.trace("Took over the transaction: {}", name);
} }
_stack.push(new StackElement(create ? CREATE_TXN : CURRENT_TXN, name)); _stack.push(new StackElement(create ? CREATE_TXN : CURRENT_TXN, name));
_name = name; _name = name;
@ -671,7 +664,7 @@ public class TransactionLegacy implements Closeable {
public void close() { public void close() {
removeUpTo(CURRENT_TXN, null); removeUpTo(CURRENT_TXN, null);
if (_stack.size() == 0) { if (_stack.isEmpty()) {
LOGGER.trace("Transaction is done"); LOGGER.trace("Transaction is done");
cleanup(); cleanup();
} }
@ -687,7 +680,7 @@ public class TransactionLegacy implements Closeable {
public boolean close(final String name) { public boolean close(final String name) {
if (_name == null) { // Already cleaned up. if (_name == null) { // Already cleaned up.
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Already cleaned up." + buildName()); LOGGER.trace("Already cleaned up.{}", buildName());
} }
return true; return true;
} }
@ -698,7 +691,7 @@ public class TransactionLegacy implements Closeable {
} }
if (LOGGER.isDebugEnabled() && _stack.size() > 2) { if (LOGGER.isDebugEnabled() && _stack.size() > 2) {
LOGGER.debug("Transaction is not closed properly: " + toString() + ". Called by " + buildName()); LOGGER.debug("Transaction is not closed properly: {}. Called by {}", this, buildName());
} }
cleanup(); cleanup();
@ -714,7 +707,7 @@ public class TransactionLegacy implements Closeable {
protected void clearLockTimes() { protected void clearLockTimes() {
if (lockLogger.isDebugEnabled()) { if (lockLogger.isDebugEnabled()) {
for (Pair<String, Long> time : _lockTimes) { for (Pair<String, Long> time : _lockTimes) {
lockLogger.trace("SQL " + time.first() + " took " + (System.currentTimeMillis() - time.second())); lockLogger.trace("SQL {} took {}", time.first(), System.currentTimeMillis() - time.second());
} }
_lockTimes.clear(); _lockTimes.clear();
} }
@ -722,14 +715,14 @@ public class TransactionLegacy implements Closeable {
public boolean commit() { public boolean commit() {
if (!_txn) { if (!_txn) {
LOGGER.warn("txn: Commit called when it is not a transaction: " + buildName()); LOGGER.warn("txn: Commit called when it is not a transaction: {}", buildName());
return false; return false;
} }
Iterator<StackElement> it = _stack.iterator(); Iterator<StackElement> it = _stack.iterator();
while (it.hasNext()) { while (it.hasNext()) {
StackElement st = it.next(); StackElement st = it.next();
if (st.type == START_TXN) { if (Objects.equals(st.type, START_TXN)) {
it.remove(); it.remove();
break; break;
} }
@ -737,7 +730,7 @@ public class TransactionLegacy implements Closeable {
if (hasTxnInStack()) { if (hasTxnInStack()) {
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("txn: Not committing because transaction started elsewhere: " + buildName() + " / " + toString()); LOGGER.trace("txn: Not committing because transaction started elsewhere: {} / {}", buildName(), this);
} }
return false; return false;
} }
@ -746,7 +739,7 @@ public class TransactionLegacy implements Closeable {
try { try {
if (_conn != null) { if (_conn != null) {
_conn.commit(); _conn.commit();
LOGGER.trace("txn: DB Changes committed. Time = " + (System.currentTimeMillis() - _txnTime)); LOGGER.trace("txn: DB Changes committed. Time = {}", System.currentTimeMillis() - _txnTime);
clearLockTimes(); clearLockTimes();
closeConnection(); closeConnection();
} }
@ -773,7 +766,7 @@ public class TransactionLegacy implements Closeable {
// we should only close db connection when it is not user managed // we should only close db connection when it is not user managed
if (_dbId != CONNECTED_DB) { if (_dbId != CONNECTED_DB) {
if (CONN_LOGGER.isTraceEnabled()) { if (CONN_LOGGER.isTraceEnabled()) {
CONN_LOGGER.trace("Closing DB connection: dbconn" + System.identityHashCode(_conn)); CONN_LOGGER.trace("Closing DB connection: dbconn{}", System.identityHashCode(_conn));
} }
_conn.close(); _conn.close();
_conn = null; _conn = null;
@ -797,13 +790,13 @@ public class TransactionLegacy implements Closeable {
break; break;
} }
if (item.type == CURRENT_TXN) { if (Objects.equals(item.type, CURRENT_TXN)) {
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Releasing the current txn: " + (item.ref != null ? item.ref : "")); LOGGER.trace("Releasing the current txn: {}", item.ref != null ? item.ref : "");
} }
} else if (item.type == CREATE_CONN) { } else if (Objects.equals(item.type, CREATE_CONN)) {
closeConnection(); closeConnection();
} else if (item.type == START_TXN) { } else if (Objects.equals(item.type, START_TXN)) {
if (item.ref == null) { if (item.ref == null) {
rollback = true; rollback = true;
} else { } else {
@ -814,10 +807,10 @@ public class TransactionLegacy implements Closeable {
LOGGER.warn("Unable to rollback Txn.", e); LOGGER.warn("Unable to rollback Txn.", e);
} }
} }
} else if (item.type == STATEMENT) { } else if (Objects.equals(item.type, STATEMENT)) {
try { try {
if (stmtLogger.isTraceEnabled()) { if (stmtLogger.isTraceEnabled()) {
stmtLogger.trace("Closing: " + ref.toString()); stmtLogger.trace("Closing: {}", ref.toString());
} }
Statement stmt = (Statement)ref; Statement stmt = (Statement)ref;
try { try {
@ -830,17 +823,17 @@ public class TransactionLegacy implements Closeable {
} }
stmt.close(); stmt.close();
} catch (final SQLException e) { } catch (final SQLException e) {
stmtLogger.trace("Unable to close statement: " + item); stmtLogger.trace("Unable to close statement: {}", item);
} }
} else if (item.type == ATTACHMENT) { } else if (Objects.equals(item.type, ATTACHMENT)) {
TransactionAttachment att = (TransactionAttachment)item.ref; TransactionAttachment att = (TransactionAttachment)item.ref;
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Cleaning up " + att.getName()); LOGGER.trace("Cleaning up {}", att.getName());
} }
att.cleanup(); att.cleanup();
} }
} catch (Exception e) { } catch (Exception e) {
LOGGER.error("Unable to clean up " + item, e); LOGGER.error("Unable to clean up {}", item, e);
} }
} }
@ -853,7 +846,7 @@ public class TransactionLegacy implements Closeable {
closePreviousStatement(); closePreviousStatement();
if (!_txn) { if (!_txn) {
if (LOGGER.isTraceEnabled()) { if (LOGGER.isTraceEnabled()) {
LOGGER.trace("Rollback called for " + _name + " when there's no transaction: " + buildName()); LOGGER.trace("Rollback called for {} when there's no transaction: {}", _name, buildName());
} }
return; return;
} }
@ -862,7 +855,7 @@ public class TransactionLegacy implements Closeable {
try { try {
if (_conn != null) { if (_conn != null) {
if (LOGGER.isDebugEnabled()) { if (LOGGER.isDebugEnabled()) {
LOGGER.debug("Rolling back the transaction: Time = " + (System.currentTimeMillis() - _txnTime) + " Name = " + _name + "; called by " + buildName()); LOGGER.debug("Rolling back the transaction: Time = {} Name = {}; called by {}", System.currentTimeMillis() - _txnTime, _name, buildName());
} }
_conn.rollback(); _conn.rollback();
} }
@ -879,7 +872,7 @@ public class TransactionLegacy implements Closeable {
_conn.rollback(sp); _conn.rollback(sp);
} }
} catch (SQLException e) { } catch (SQLException e) {
LOGGER.warn("Unable to rollback to savepoint " + sp); LOGGER.warn("Unable to rollback to savepoint {}", sp);
} }
if (!hasTxnInStack()) { if (!hasTxnInStack()) {
@ -892,7 +885,7 @@ public class TransactionLegacy implements Closeable {
Iterator<StackElement> it = _stack.iterator(); Iterator<StackElement> it = _stack.iterator();
while (it.hasNext()) { while (it.hasNext()) {
StackElement st = it.next(); StackElement st = it.next();
if (st.type == START_TXN) { if (Objects.equals(st.type, START_TXN)) {
if (st.ref == null) { if (st.ref == null) {
it.remove(); it.remove();
} else { } else {
@ -943,7 +936,7 @@ public class TransactionLegacy implements Closeable {
Iterator<StackElement> it = _stack.iterator(); Iterator<StackElement> it = _stack.iterator();
while (it.hasNext()) { while (it.hasNext()) {
StackElement se = it.next(); StackElement se = it.next();
if (se.type == START_TXN && se.ref == sp) { if (Objects.equals(se.type, START_TXN) && se.ref == sp) {
return true; return true;
} }
} }
@ -960,7 +953,7 @@ public class TransactionLegacy implements Closeable {
Iterator<StackElement> it = _stack.iterator(); Iterator<StackElement> it = _stack.iterator();
while (it.hasNext()) { while (it.hasNext()) {
StackElement se = it.next(); StackElement se = it.next();
if (se.type == START_TXN) { if (Objects.equals(se.type, START_TXN)) {
it.remove(); it.remove();
if (se.ref == sp) { if (se.ref == sp) {
return; return;
@ -993,7 +986,7 @@ public class TransactionLegacy implements Closeable {
@Override @Override
protected void finalize() throws Throwable { protected void finalize() throws Throwable {
if (!(_conn == null && (_stack == null || _stack.size() == 0))) { if (!(_conn == null && (_stack == null || _stack.isEmpty()))) {
assert (false) : "Oh Alex oh alex...something is wrong with how we're doing this"; assert (false) : "Oh Alex oh alex...something is wrong with how we're doing this";
LOGGER.error("Something went wrong that a transaction is orphaned before db connection is closed"); LOGGER.error("Something went wrong that a transaction is orphaned before db connection is closed");
cleanup(); cleanup();
@ -1052,11 +1045,11 @@ public class TransactionLegacy implements Closeable {
@SuppressWarnings({"rawtypes", "unchecked"}) @SuppressWarnings({"rawtypes", "unchecked"})
public static void initDataSource(Properties dbProps) { public static void initDataSource(Properties dbProps) {
try { try {
if (dbProps.size() == 0) if (dbProps.isEmpty())
return; return;
s_dbHAEnabled = Boolean.valueOf(dbProps.getProperty("db.ha.enabled")); s_dbHAEnabled = Boolean.parseBoolean(dbProps.getProperty("db.ha.enabled"));
LOGGER.info("Is Data Base High Availiability enabled? Ans : " + s_dbHAEnabled); LOGGER.info("Is Data Base High Availiability enabled? Ans : {}", s_dbHAEnabled);
String loadBalanceStrategy = dbProps.getProperty("db.ha.loadBalanceStrategy"); String loadBalanceStrategy = dbProps.getProperty("db.ha.loadBalanceStrategy");
// FIXME: If params are missing...default them???? // FIXME: If params are missing...default them????
final Integer cloudMaxActive = parseNumber(dbProps.getProperty("db.cloud.maxActive"), Integer.class); final Integer cloudMaxActive = parseNumber(dbProps.getProperty("db.cloud.maxActive"), Integer.class);
@ -1082,7 +1075,7 @@ public class TransactionLegacy implements Closeable {
} else if (cloudIsolationLevel.equalsIgnoreCase("readuncommitted")) { } else if (cloudIsolationLevel.equalsIgnoreCase("readuncommitted")) {
isolationLevel = Connection.TRANSACTION_READ_UNCOMMITTED; isolationLevel = Connection.TRANSACTION_READ_UNCOMMITTED;
} else { } else {
LOGGER.warn("Unknown isolation level " + cloudIsolationLevel + ". Using read uncommitted"); LOGGER.warn("Unknown isolation level {}. Using read uncommitted", cloudIsolationLevel);
} }
final boolean cloudTestOnBorrow = Boolean.parseBoolean(dbProps.getProperty("db.cloud.testOnBorrow")); final boolean cloudTestOnBorrow = Boolean.parseBoolean(dbProps.getProperty("db.cloud.testOnBorrow"));
@ -1190,16 +1183,16 @@ public class TransactionLegacy implements Closeable {
driver = dbProps.getProperty(String.format("db.%s.driver", schema)); driver = dbProps.getProperty(String.format("db.%s.driver", schema));
connectionUri = getPropertiesAndBuildConnectionUri(dbProps, loadBalanceStrategy, driver, useSSL, schema); connectionUri = getPropertiesAndBuildConnectionUri(dbProps, loadBalanceStrategy, driver, useSSL, schema);
} else { } else {
LOGGER.warn(String.format("db.%s.uri was set, ignoring the following properties for schema %s of db.properties: [host, port, name, driver, autoReconnect, url.params," LOGGER.warn("db.{}.uri was set, ignoring the following properties for schema {} of db.properties: [host, port, name, driver, autoReconnect, url.params,"
+ " replicas, ha.loadBalanceStrategy, ha.enable, failOverReadOnly, reconnectAtTxEnd, autoReconnectForPools, secondsBeforeRetrySource, queriesBeforeRetrySource, " + " replicas, ha.loadBalanceStrategy, ha.enable, failOverReadOnly, reconnectAtTxEnd, autoReconnectForPools, secondsBeforeRetrySource, queriesBeforeRetrySource, "
+ "initialTimeout].", schema, schema)); + "initialTimeout].", schema, schema);
String[] splitUri = propertyUri.split(":"); String[] splitUri = propertyUri.split(":");
driver = String.format("%s:%s", splitUri[0], splitUri[1]); driver = String.format("%s:%s", splitUri[0], splitUri[1]);
connectionUri = propertyUri; connectionUri = propertyUri;
} }
LOGGER.info(String.format("Using the following URI to connect to %s database [%s].", schema, connectionUri)); LOGGER.info("Using the following URI to connect to {} database [{}].", schema, connectionUri);
return new Pair<>(connectionUri, driver); return new Pair<>(connectionUri, driver);
} }
@ -1215,7 +1208,7 @@ public class TransactionLegacy implements Closeable {
if (s_dbHAEnabled) { if (s_dbHAEnabled) {
dbHaParams = getDBHAParams(schema, dbProps); dbHaParams = getDBHAParams(schema, dbProps);
replicas = dbProps.getProperty(String.format("db.%s.replicas", schema)); replicas = dbProps.getProperty(String.format("db.%s.replicas", schema));
LOGGER.info(String.format("The replicas configured for %s data base are %s.", schema, replicas)); LOGGER.info("The replicas configured for {} data base are {}.", schema, replicas);
} }
return buildConnectionUri(loadBalanceStrategy, driver, useSSL, host, replicas, port, dbName, autoReconnect, urlParams, dbHaParams); return buildConnectionUri(loadBalanceStrategy, driver, useSSL, host, replicas, port, dbName, autoReconnect, urlParams, dbHaParams);
@ -1322,8 +1315,7 @@ public class TransactionLegacy implements Closeable {
config.addDataSourceProperty("elideSetAutoCommits", "true"); config.addDataSourceProperty("elideSetAutoCommits", "true");
config.addDataSourceProperty("maintainTimeStats", "false"); config.addDataSourceProperty("maintainTimeStats", "false");
HikariDataSource dataSource = new HikariDataSource(config); return new HikariDataSource(config);
return dataSource;
} }
private static DataSource createDbcpDataSource(String uri, String username, String password, private static DataSource createDbcpDataSource(String uri, String username, String password,
@ -1411,19 +1403,19 @@ public class TransactionLegacy implements Closeable {
private static String getDBHAParams(String dbName, Properties dbProps) { private static String getDBHAParams(String dbName, Properties dbProps) {
StringBuilder sb = new StringBuilder(); StringBuilder sb = new StringBuilder();
sb.append("failOverReadOnly=" + dbProps.getProperty("db." + dbName + ".failOverReadOnly")); sb.append("failOverReadOnly=").append(dbProps.getProperty("db." + dbName + ".failOverReadOnly"));
sb.append("&").append("reconnectAtTxEnd=" + dbProps.getProperty("db." + dbName + ".reconnectAtTxEnd")); sb.append("&").append("reconnectAtTxEnd=").append(dbProps.getProperty("db." + dbName + ".reconnectAtTxEnd"));
sb.append("&").append("autoReconnectForPools=" + dbProps.getProperty("db." + dbName + ".autoReconnectForPools")); sb.append("&").append("autoReconnectForPools=").append(dbProps.getProperty("db." + dbName + ".autoReconnectForPools"));
sb.append("&").append("secondsBeforeRetrySource=" + dbProps.getProperty("db." + dbName + ".secondsBeforeRetrySource")); sb.append("&").append("secondsBeforeRetrySource=").append(dbProps.getProperty("db." + dbName + ".secondsBeforeRetrySource"));
sb.append("&").append("queriesBeforeRetrySource=" + dbProps.getProperty("db." + dbName + ".queriesBeforeRetrySource")); sb.append("&").append("queriesBeforeRetrySource=").append(dbProps.getProperty("db." + dbName + ".queriesBeforeRetrySource"));
sb.append("&").append("initialTimeout=" + dbProps.getProperty("db." + dbName + ".initialTimeout")); sb.append("&").append("initialTimeout=").append(dbProps.getProperty("db." + dbName + ".initialTimeout"));
return sb.toString(); return sb.toString();
} }
/** /**
* Used for unit testing primarily * Used for unit testing primarily
* *
* @param conn * @param conn connection to use
*/ */
protected void setConnection(Connection conn) { protected void setConnection(Connection conn) {
_conn = conn; _conn = conn;
@ -1433,7 +1425,7 @@ public class TransactionLegacy implements Closeable {
* Receives a list of {@link PreparedStatement} and quietly closes all of them, which * Receives a list of {@link PreparedStatement} and quietly closes all of them, which
* triggers also closing their dependent objects, like a {@link ResultSet} * triggers also closing their dependent objects, like a {@link ResultSet}
* *
* @param pstmt2Close * @param pstmt2Close list of PreparedStatement to close
*/ */
public static void closePstmts(List<PreparedStatement> pstmt2Close) { public static void closePstmts(List<PreparedStatement> pstmt2Close) {
for (PreparedStatement pstmt : pstmt2Close) { for (PreparedStatement pstmt : pstmt2Close) {
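Stepping back from this file's hunks for a moment: the TransactionLegacy changes consistently replace string-concatenated log calls with parameterized placeholders and swap == comparisons of the stack-element type constants for null-safe Objects.equals. The following minimal sketch (not CloudStack code; class and constant names are made up for illustration) shows both idioms in isolation.

import java.util.Objects;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

// Illustrative only: parameterized logging plus Objects.equals(), as adopted above.
public class TxnLoggingSketch {
    private static final Logger LOGGER = LogManager.getLogger(TxnLoggingSketch.class);
    private static final String START_TXN = "start_txn"; // stand-in for the real constant

    static boolean isStartTxn(String type) {
        // Null-safe and value-based, unlike a reference comparison such as type == START_TXN.
        return Objects.equals(type, START_TXN);
    }

    public static void main(String[] args) {
        long elapsed = 17L;
        // Placeholders defer message assembly until the log level is actually enabled.
        LOGGER.debug("txn: DB Changes committed. Time = {}", elapsed);
        LOGGER.info("start txn? {}", isStartTxn("start_txn"));
    }
}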

View File

@ -59,3 +59,8 @@ USAGELOG=/var/log/cloudstack/usage/usage.log
USAGESYSCONFDIR=/etc/cloudstack/usage USAGESYSCONFDIR=/etc/cloudstack/usage
PACKAGE=cloudstack PACKAGE=cloudstack
EXTENSIONSDEPLOYMENTMODE=production EXTENSIONSDEPLOYMENTMODE=production
GUESTNVRAMTEMPLATELEGACY=/usr/share/OVMF/OVMF_VARS_4M.fd
GUESTLOADERLEGACY=/usr/share/OVMF/OVMF_CODE_4M.fd
GUESTNVRAMTEMPLATESECURE=/usr/share/OVMF/OVMF_VARS_4M.ms.fd
GUESTLOADERSECURE=/usr/share/OVMF/OVMF_CODE_4M.secboot.fd
GUESTNVRAMPATH=/var/lib/libvirt/qemu/nvram/
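These Debian build substitutions feed the new uefi.properties template packaged with the agent. As a quick sanity check on a host, a throwaway reader like the sketch below can dump whatever the rendered file contains; the install path is an assumption based on the packaging changes in this commit, not something the patch itself defines.

import java.io.FileInputStream;
import java.io.IOException;
import java.util.Properties;

// Throwaway probe: prints every key/value the packaged uefi.properties ends up with.
// The path below assumes the agent's default configuration directory.
public class UefiPropertiesProbe {
    public static void main(String[] args) throws IOException {
        Properties uefi = new Properties();
        try (FileInputStream in = new FileInputStream("/etc/cloudstack/agent/uefi.properties")) {
            uefi.load(in);
        }
        uefi.stringPropertyNames().stream().sorted()
            .forEach(key -> System.out.println(key + " = " + uefi.getProperty(key)));
    }
}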

View File

@ -115,6 +115,8 @@ Requires: ipset
Requires: perl Requires: perl
Requires: rsync Requires: rsync
Requires: cifs-utils Requires: cifs-utils
Requires: edk2-ovmf
Requires: swtpm
Requires: (python3-libvirt or python3-libvirt-python) Requires: (python3-libvirt or python3-libvirt-python)
Requires: (qemu-img or qemu-tools) Requires: (qemu-img or qemu-tools)
Requires: qemu-kvm Requires: qemu-kvm
@ -356,6 +358,7 @@ install -D packaging/systemd/cloudstack-agent.service ${RPM_BUILD_ROOT}%{_unitdi
install -D packaging/systemd/cloudstack-rolling-maintenance@.service ${RPM_BUILD_ROOT}%{_unitdir}/%{name}-rolling-maintenance@.service install -D packaging/systemd/cloudstack-rolling-maintenance@.service ${RPM_BUILD_ROOT}%{_unitdir}/%{name}-rolling-maintenance@.service
install -D packaging/systemd/cloudstack-agent.default ${RPM_BUILD_ROOT}%{_sysconfdir}/default/%{name}-agent install -D packaging/systemd/cloudstack-agent.default ${RPM_BUILD_ROOT}%{_sysconfdir}/default/%{name}-agent
install -D agent/target/transformed/agent.properties ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/agent.properties install -D agent/target/transformed/agent.properties ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/agent.properties
install -D agent/target/transformed/uefi.properties ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/uefi.properties
install -D agent/target/transformed/environment.properties ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/environment.properties install -D agent/target/transformed/environment.properties ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/environment.properties
install -D agent/target/transformed/log4j-cloud.xml ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/log4j-cloud.xml install -D agent/target/transformed/log4j-cloud.xml ${RPM_BUILD_ROOT}%{_sysconfdir}/%{name}/agent/log4j-cloud.xml
install -D agent/target/transformed/cloud-setup-agent ${RPM_BUILD_ROOT}%{_bindir}/%{name}-setup-agent install -D agent/target/transformed/cloud-setup-agent ${RPM_BUILD_ROOT}%{_bindir}/%{name}-setup-agent
@ -523,7 +526,7 @@ mkdir -m 0755 -p /usr/share/cloudstack-agent/tmp
/usr/bin/systemctl enable cloudstack-rolling-maintenance@p > /dev/null 2>&1 || true /usr/bin/systemctl enable cloudstack-rolling-maintenance@p > /dev/null 2>&1 || true
/usr/bin/systemctl enable --now rngd > /dev/null 2>&1 || true /usr/bin/systemctl enable --now rngd > /dev/null 2>&1 || true
# if saved configs from upgrade exist, copy them over # if saved agent.properties from upgrade exist, copy them over
if [ -f "%{_sysconfdir}/cloud.rpmsave/agent/agent.properties" ]; then if [ -f "%{_sysconfdir}/cloud.rpmsave/agent/agent.properties" ]; then
mv %{_sysconfdir}/%{name}/agent/agent.properties %{_sysconfdir}/%{name}/agent/agent.properties.rpmnew mv %{_sysconfdir}/%{name}/agent/agent.properties %{_sysconfdir}/%{name}/agent/agent.properties.rpmnew
cp -p %{_sysconfdir}/cloud.rpmsave/agent/agent.properties %{_sysconfdir}/%{name}/agent cp -p %{_sysconfdir}/cloud.rpmsave/agent/agent.properties %{_sysconfdir}/%{name}/agent
@ -531,6 +534,14 @@ if [ -f "%{_sysconfdir}/cloud.rpmsave/agent/agent.properties" ]; then
mv %{_sysconfdir}/cloud.rpmsave/agent/agent.properties %{_sysconfdir}/cloud.rpmsave/agent/agent.properties.rpmsave mv %{_sysconfdir}/cloud.rpmsave/agent/agent.properties %{_sysconfdir}/cloud.rpmsave/agent/agent.properties.rpmsave
fi fi
# if saved uefi.properties from upgrade exist, copy them over
if [ -f "%{_sysconfdir}/cloud.rpmsave/agent/uefi.properties" ]; then
mv %{_sysconfdir}/%{name}/agent/uefi.properties %{_sysconfdir}/%{name}/agent/uefi.properties.rpmnew
cp -p %{_sysconfdir}/cloud.rpmsave/agent/uefi.properties %{_sysconfdir}/%{name}/agent
# make sure we only do this on the first install of this RPM, don't want to overwrite on a reinstall
mv %{_sysconfdir}/cloud.rpmsave/agent/uefi.properties %{_sysconfdir}/cloud.rpmsave/agent/uefi.properties.rpmsave
fi
systemctl daemon-reload systemctl daemon-reload
# Print help message # Print help message

View File

@ -58,3 +58,8 @@ USAGECLASSPATH=
USAGELOG=/var/log/cloudstack/usage/usage.log USAGELOG=/var/log/cloudstack/usage/usage.log
USAGESYSCONFDIR=/etc/sysconfig USAGESYSCONFDIR=/etc/sysconfig
EXTENSIONSDEPLOYMENTMODE=production EXTENSIONSDEPLOYMENTMODE=production
GUESTNVRAMTEMPLATELEGACY=/usr/share/edk2/ovmf/OVMF_VARS.fd
GUESTLOADERLEGACY=/usr/share/edk2/ovmf/OVMF_CODE.cc.fd
GUESTNVRAMTEMPLATESECURE=/usr/share/edk2/ovmf/OVMF_VARS.secboot.fd
GUESTLOADERSECURE=/usr/share/edk2/ovmf/OVMF_CODE.secboot.fd
GUESTNVRAMPATH=/var/lib/libvirt/qemu/nvram/

View File

@ -105,7 +105,8 @@ public class VeeamClient {
private static final String REPOSITORY_REFERENCE = "RepositoryReference"; private static final String REPOSITORY_REFERENCE = "RepositoryReference";
private static final String RESTORE_POINT_REFERENCE = "RestorePointReference"; private static final String RESTORE_POINT_REFERENCE = "RestorePointReference";
private static final String BACKUP_FILE_REFERENCE = "BackupFileReference"; private static final String BACKUP_FILE_REFERENCE = "BackupFileReference";
private static final SimpleDateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
private static final ObjectMapper OBJECT_MAPPER = new XmlMapper();
private String veeamServerIp; private String veeamServerIp;
private final Integer veeamServerVersion; private final Integer veeamServerVersion;
@ -124,6 +125,8 @@ public class VeeamClient {
this.taskPollInterval = taskPollInterval; this.taskPollInterval = taskPollInterval;
this.taskPollMaxRetry = taskPollMaxRetry; this.taskPollMaxRetry = taskPollMaxRetry;
OBJECT_MAPPER.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
final RequestConfig config = RequestConfig.custom() final RequestConfig config = RequestConfig.custom()
.setConnectTimeout(timeout * 1000) .setConnectTimeout(timeout * 1000)
.setConnectionRequestTimeout(timeout * 1000) .setConnectionRequestTimeout(timeout * 1000)
@ -233,8 +236,7 @@ public class VeeamClient {
private HttpResponse post(final String path, final Object obj) throws IOException { private HttpResponse post(final String path, final Object obj) throws IOException {
String xml = null; String xml = null;
if (obj != null) { if (obj != null) {
XmlMapper xmlMapper = new XmlMapper(); xml = OBJECT_MAPPER.writer()
xml = xmlMapper.writer()
.with(ToXmlGenerator.Feature.WRITE_XML_DECLARATION) .with(ToXmlGenerator.Feature.WRITE_XML_DECLARATION)
.writeValueAsString(obj); .writeValueAsString(obj);
// Remove invalid/empty xmlns // Remove invalid/empty xmlns
@ -277,8 +279,7 @@ public class VeeamClient {
try { try {
final HttpResponse response = get("/hierarchyRoots"); final HttpResponse response = get("/hierarchyRoots");
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final EntityReferences references = OBJECT_MAPPER.readValue(response.getEntity().getContent(), EntityReferences.class);
final EntityReferences references = objectMapper.readValue(response.getEntity().getContent(), EntityReferences.class);
for (final Ref ref : references.getRefs()) { for (final Ref ref : references.getRefs()) {
if (ref.getName().equals(vmwareDcName) && ref.getType().equals(HIERARCHY_ROOT_REFERENCE)) { if (ref.getName().equals(vmwareDcName) && ref.getType().equals(HIERARCHY_ROOT_REFERENCE)) {
return ref.getUid(); return ref.getUid();
@ -297,8 +298,7 @@ public class VeeamClient {
try { try {
final HttpResponse response = get(String.format("/lookup?host=%s&type=Vm&name=%s", hierarchyId, vmName)); final HttpResponse response = get(String.format("/lookup?host=%s&type=Vm&name=%s", hierarchyId, vmName));
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final HierarchyItems items = OBJECT_MAPPER.readValue(response.getEntity().getContent(), HierarchyItems.class);
final HierarchyItems items = objectMapper.readValue(response.getEntity().getContent(), HierarchyItems.class);
if (items == null || items.getItems() == null || items.getItems().isEmpty()) { if (items == null || items.getItems() == null || items.getItems().isEmpty()) {
throw new CloudRuntimeException("Could not find VM " + vmName + " in Veeam, please ask administrator to check Veeam B&R manager"); throw new CloudRuntimeException("Could not find VM " + vmName + " in Veeam, please ask administrator to check Veeam B&R manager");
} }
@ -316,14 +316,12 @@ public class VeeamClient {
private Task parseTaskResponse(HttpResponse response) throws IOException { private Task parseTaskResponse(HttpResponse response) throws IOException {
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); return OBJECT_MAPPER.readValue(response.getEntity().getContent(), Task.class);
return objectMapper.readValue(response.getEntity().getContent(), Task.class);
} }
protected RestoreSession parseRestoreSessionResponse(HttpResponse response) throws IOException { protected RestoreSession parseRestoreSessionResponse(HttpResponse response) throws IOException {
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); return OBJECT_MAPPER.readValue(response.getEntity().getContent(), RestoreSession.class);
return objectMapper.readValue(response.getEntity().getContent(), RestoreSession.class);
} }
private boolean checkTaskStatus(final HttpResponse response) throws IOException { private boolean checkTaskStatus(final HttpResponse response) throws IOException {
@ -410,8 +408,7 @@ public class VeeamClient {
String repositoryName = getRepositoryNameFromJob(backupName); String repositoryName = getRepositoryNameFromJob(backupName);
final HttpResponse response = get(String.format("/backupServers/%s/repositories", backupServerId)); final HttpResponse response = get(String.format("/backupServers/%s/repositories", backupServerId));
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final EntityReferences references = OBJECT_MAPPER.readValue(response.getEntity().getContent(), EntityReferences.class);
final EntityReferences references = objectMapper.readValue(response.getEntity().getContent(), EntityReferences.class);
for (final Ref ref : references.getRefs()) { for (final Ref ref : references.getRefs()) {
if (ref.getType().equals(REPOSITORY_REFERENCE) && ref.getName().equals(repositoryName)) { if (ref.getType().equals(REPOSITORY_REFERENCE) && ref.getName().equals(repositoryName)) {
return ref; return ref;
@ -447,8 +444,7 @@ public class VeeamClient {
try { try {
final HttpResponse response = get("/backups"); final HttpResponse response = get("/backups");
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final EntityReferences entityReferences = OBJECT_MAPPER.readValue(response.getEntity().getContent(), EntityReferences.class);
final EntityReferences entityReferences = objectMapper.readValue(response.getEntity().getContent(), EntityReferences.class);
for (final Ref ref : entityReferences.getRefs()) { for (final Ref ref : entityReferences.getRefs()) {
logger.debug("Veeam Backup found, name: " + ref.getName() + ", uid: " + ref.getUid() + ", type: " + ref.getType()); logger.debug("Veeam Backup found, name: " + ref.getName() + ", uid: " + ref.getUid() + ", type: " + ref.getType());
} }
@ -463,8 +459,7 @@ public class VeeamClient {
try { try {
final HttpResponse response = get("/jobs"); final HttpResponse response = get("/jobs");
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final EntityReferences entityReferences = OBJECT_MAPPER.readValue(response.getEntity().getContent(), EntityReferences.class);
final EntityReferences entityReferences = objectMapper.readValue(response.getEntity().getContent(), EntityReferences.class);
final List<BackupOffering> policies = new ArrayList<>(); final List<BackupOffering> policies = new ArrayList<>();
if (entityReferences == null || entityReferences.getRefs() == null) { if (entityReferences == null || entityReferences.getRefs() == null) {
return policies; return policies;
@ -486,9 +481,7 @@ public class VeeamClient {
final HttpResponse response = get(String.format("/jobs/%s?format=Entity", final HttpResponse response = get(String.format("/jobs/%s?format=Entity",
jobId.replace("urn:veeam:Job:", ""))); jobId.replace("urn:veeam:Job:", "")));
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); return OBJECT_MAPPER.readValue(response.getEntity().getContent(), Job.class);
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
return objectMapper.readValue(response.getEntity().getContent(), Job.class);
} catch (final IOException e) { } catch (final IOException e) {
logger.error("Failed to list Veeam jobs due to:", e); logger.error("Failed to list Veeam jobs due to:", e);
checkResponseTimeOut(e); checkResponseTimeOut(e);
@ -568,9 +561,7 @@ public class VeeamClient {
final String veeamVmRefId = lookupVM(hierarchyId, vmwareInstanceName); final String veeamVmRefId = lookupVM(hierarchyId, vmwareInstanceName);
final HttpResponse response = get(String.format("/jobs/%s/includes", jobId)); final HttpResponse response = get(String.format("/jobs/%s/includes", jobId));
checkResponseOK(response); checkResponseOK(response);
final ObjectMapper objectMapper = new XmlMapper(); final ObjectsInJob jobObjects = OBJECT_MAPPER.readValue(response.getEntity().getContent(), ObjectsInJob.class);
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
final ObjectsInJob jobObjects = objectMapper.readValue(response.getEntity().getContent(), ObjectsInJob.class);
if (jobObjects == null || jobObjects.getObjects() == null) { if (jobObjects == null || jobObjects.getObjects() == null) {
logger.warn("No objects found in the Veeam job " + jobId); logger.warn("No objects found in the Veeam job " + jobId);
return false; return false;
@ -710,8 +701,7 @@ public class VeeamClient {
protected Map<String, Backup.Metric> processHttpResponseForBackupMetrics(final InputStream content) { protected Map<String, Backup.Metric> processHttpResponseForBackupMetrics(final InputStream content) {
Map<String, Backup.Metric> metrics = new HashMap<>(); Map<String, Backup.Metric> metrics = new HashMap<>();
try { try {
final ObjectMapper objectMapper = new XmlMapper(); final BackupFiles backupFiles = OBJECT_MAPPER.readValue(content, BackupFiles.class);
final BackupFiles backupFiles = objectMapper.readValue(content, BackupFiles.class);
if (backupFiles == null || CollectionUtils.isEmpty(backupFiles.getBackupFiles())) { if (backupFiles == null || CollectionUtils.isEmpty(backupFiles.getBackupFiles())) {
throw new CloudRuntimeException("Could not get backup metrics via Veeam B&R API"); throw new CloudRuntimeException("Could not get backup metrics via Veeam B&R API");
} }
@ -855,8 +845,7 @@ public class VeeamClient {
public List<Backup.RestorePoint> processHttpResponseForVmRestorePoints(InputStream content, String vmwareDcName, String vmInternalName, Map<String, Backup.Metric> metricsMap) { public List<Backup.RestorePoint> processHttpResponseForVmRestorePoints(InputStream content, String vmwareDcName, String vmInternalName, Map<String, Backup.Metric> metricsMap) {
List<Backup.RestorePoint> vmRestorePointList = new ArrayList<>(); List<Backup.RestorePoint> vmRestorePointList = new ArrayList<>();
try { try {
final ObjectMapper objectMapper = new XmlMapper(); final VmRestorePoints vmRestorePoints = OBJECT_MAPPER.readValue(content, VmRestorePoints.class);
final VmRestorePoints vmRestorePoints = objectMapper.readValue(content, VmRestorePoints.class);
final String hierarchyId = findDCHierarchy(vmwareDcName); final String hierarchyId = findDCHierarchy(vmwareDcName);
final String hierarchyUuid = StringUtils.substringAfterLast(hierarchyId, ":"); final String hierarchyUuid = StringUtils.substringAfterLast(hierarchyId, ":");
if (vmRestorePoints == null) { if (vmRestorePoints == null) {
@ -907,7 +896,7 @@ public class VeeamClient {
} }
private Date formatDate(String date) throws ParseException { private Date formatDate(String date) throws ParseException {
return dateFormat.parse(StringUtils.substring(date, 0, 19)); return DATE_FORMAT.parse(StringUtils.substring(date, 0, 19));
} }
public Pair<Boolean, String> restoreVMToDifferentLocation(String restorePointId, String restoreLocation, String hostIp, String dataStoreUuid) { public Pair<Boolean, String> restoreVMToDifferentLocation(String restorePointId, String restoreLocation, String hostIp, String dataStoreUuid) {

View File

@ -483,7 +483,9 @@ public class VeeamClientTest {
" xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"\n" +
" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n" +
" xmlns=\"http://www.veeam.com/ent/v1.0\">\n" + " xmlns=\"http://www.veeam.com/ent/v1.0\">\n" +
" <VmRestorePoint Href=\"https://10.0.3.142:9398/api/vmRestorePoints/f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977?format=Entity\" Type=\"VmRestorePoint\" Name=\"i-2-4-VM@2023-11-03 16:26:12.209913\" UID=\"urn:veeam:VmRestorePoint:f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977\" VmDisplayName=\"i-2-4-VM\">\n" + " <VmRestorePoint Href=\"https://10.0.3.142:9398/api/vmRestorePoints/f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977?format=Entity\"" +
" Type=\"VmRestorePoint\" Name=\"i-2-4-VM@2023-11-03 16:26:12.209913\" UID=\"urn:veeam:VmRestorePoint:f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977\"" +
" VmDisplayName=\"i-2-4-VM\" SqlInfo=\"SqlInfo\">\n" +
" <Links>\n" + " <Links>\n" +
" <Link Href=\"https://10.0.3.142:9398/api/vmRestorePoints/f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977?action=restore\" Rel=\"Restore\" />\n" + " <Link Href=\"https://10.0.3.142:9398/api/vmRestorePoints/f6d504cf-eafe-4cd2-8dfc-e9cfe2f1e977?action=restore\" Rel=\"Restore\" />\n" +
" <Link Href=\"https://10.0.3.142:9398/api/backupServers/18cc2a81-1ff0-42cd-8389-62f2bbcc6b7f\" Name=\"10.0.3.142\" Type=\"BackupServerReference\" Rel=\"Up\" />\n" + " <Link Href=\"https://10.0.3.142:9398/api/backupServers/18cc2a81-1ff0-42cd-8389-62f2bbcc6b7f\" Name=\"10.0.3.142\" Type=\"BackupServerReference\" Rel=\"Up\" />\n" +

View File

@ -3293,25 +3293,25 @@ public class LibvirtComputingResource extends ServerResourceBase implements Serv
setGuestLoader(bootMode, SECURE, guest, GuestDef.GUEST_LOADER_SECURE); setGuestLoader(bootMode, SECURE, guest, GuestDef.GUEST_LOADER_SECURE);
setGuestLoader(bootMode, LEGACY, guest, GuestDef.GUEST_LOADER_LEGACY); setGuestLoader(bootMode, LEGACY, guest, GuestDef.GUEST_LOADER_LEGACY);
if (isUefiPropertieNotNull(GuestDef.GUEST_NVRAM_PATH)) { if (isUefiPropertyNotNull(GuestDef.GUEST_NVRAM_PATH)) {
guest.setNvram(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_PATH)); guest.setNvram(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_PATH));
} }
if (isSecureBoot && isUefiPropertieNotNull(GuestDef.GUEST_NVRAM_TEMPLATE_SECURE) && SECURE.equalsIgnoreCase(bootMode)) { if (isSecureBoot && isUefiPropertyNotNull(GuestDef.GUEST_NVRAM_TEMPLATE_SECURE) && SECURE.equalsIgnoreCase(bootMode)) {
guest.setNvramTemplate(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_TEMPLATE_SECURE)); guest.setNvramTemplate(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_TEMPLATE_SECURE));
} else if (isUefiPropertieNotNull(GuestDef.GUEST_NVRAM_TEMPLATE_LEGACY)) { } else if (isUefiPropertyNotNull(GuestDef.GUEST_NVRAM_TEMPLATE_LEGACY)) {
guest.setNvramTemplate(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_TEMPLATE_LEGACY)); guest.setNvramTemplate(uefiProperties.getProperty(GuestDef.GUEST_NVRAM_TEMPLATE_LEGACY));
} }
} }
private void setGuestLoader(String bootMode, String mode, GuestDef guest, String propertie) { private void setGuestLoader(String bootMode, String mode, GuestDef guest, String property) {
if (isUefiPropertieNotNull(propertie) && mode.equalsIgnoreCase(bootMode)) { if (isUefiPropertyNotNull(property) && mode.equalsIgnoreCase(bootMode)) {
guest.setLoader(uefiProperties.getProperty(propertie)); guest.setLoader(uefiProperties.getProperty(property));
} }
} }
private boolean isUefiPropertieNotNull(String propertie) { private boolean isUefiPropertyNotNull(String property) {
return uefiProperties.getProperty(propertie) != null; return uefiProperties.getProperty(property) != null;
} }
public boolean isGuestAarch64() { public boolean isGuestAarch64() {
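The renamed helpers above pick the loader and NVRAM template from uefi.properties according to the requested boot mode. The rough standalone sketch below mirrors that selection logic; the property key strings are assumptions standing in for the GuestDef constants and are not read from the real agent configuration.

import java.util.Properties;

// Rough sketch of the secure-vs-legacy selection performed by the agent helpers above.
public class UefiSelectionSketch {
    static final String SECURE = "secure";

    // Key names below are illustrative placeholders for the GuestDef constants.
    static String pickLoader(Properties uefi, String bootMode) {
        String key = SECURE.equalsIgnoreCase(bootMode) ? "guest.loader.secure" : "guest.loader.legacy";
        return uefi.getProperty(key);
    }

    static String pickNvramTemplate(Properties uefi, String bootMode, boolean secureBoot) {
        String secureTemplate = uefi.getProperty("guest.nvram.template.secure");
        if (secureBoot && SECURE.equalsIgnoreCase(bootMode) && secureTemplate != null) {
            return secureTemplate;
        }
        return uefi.getProperty("guest.nvram.template.legacy");
    }

    public static void main(String[] args) {
        Properties uefi = new Properties();
        uefi.setProperty("guest.loader.legacy", "/usr/share/OVMF/OVMF_CODE_4M.fd");
        uefi.setProperty("guest.loader.secure", "/usr/share/OVMF/OVMF_CODE_4M.secboot.fd");
        uefi.setProperty("guest.nvram.template.legacy", "/usr/share/OVMF/OVMF_VARS_4M.fd");
        uefi.setProperty("guest.nvram.template.secure", "/usr/share/OVMF/OVMF_VARS_4M.ms.fd");
        System.out.println(pickLoader(uefi, "secure"));
        System.out.println(pickNvramTemplate(uefi, "secure", true));
    }
}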

View File

@ -695,7 +695,12 @@ public class VMwareGuru extends HypervisorGuruBase implements HypervisorGuru, Co
updateTemplateRef(templateId, poolId, templatePath, templateSize); updateTemplateRef(templateId, poolId, templatePath, templateSize);
return templateId; return templateId;
} else { } else {
return volumeVO.getTemplateId(); Long templateId = volumeVO.getTemplateId();
if (templateId == null && volumeVO.getInstanceId() != null) {
VMInstanceVO vmInstanceVO = vmDao.findByIdIncludingRemoved(volumeVO.getInstanceId());
return vmInstanceVO.getTemplateId();
}
return templateId;
} }
} }
} }

View File

@ -61,7 +61,7 @@ public interface KubernetesClusterService extends PluggableService, Configurable
"cloud.kubernetes.cluster.network.offering", "cloud.kubernetes.cluster.network.offering",
"DefaultNetworkOfferingforKubernetesService", "DefaultNetworkOfferingforKubernetesService",
"Name of the network offering that will be used to create isolated network in which Kubernetes cluster VMs will be launched", "Name of the network offering that will be used to create isolated network in which Kubernetes cluster VMs will be launched",
false, true,
KubernetesServiceEnabled.key()); KubernetesServiceEnabled.key());
static final ConfigKey<Long> KubernetesClusterStartTimeout = new ConfigKey<Long>("Advanced", Long.class, static final ConfigKey<Long> KubernetesClusterStartTimeout = new ConfigKey<Long>("Advanced", Long.class,
"cloud.kubernetes.cluster.start.timeout", "cloud.kubernetes.cluster.start.timeout",

View File

@ -49,7 +49,7 @@ public class ADLdapUserManagerImpl extends OpenLdapUserManagerImpl implements Ld
searchControls.setReturningAttributes(_ldapConfiguration.getReturnAttributes(domainId)); searchControls.setReturningAttributes(_ldapConfiguration.getReturnAttributes(domainId));
NamingEnumeration<SearchResult> results = context.search(basedn, generateADGroupSearchFilter(groupName, domainId), searchControls); NamingEnumeration<SearchResult> results = context.search(basedn, generateADGroupSearchFilter(groupName, domainId), searchControls);
final List<LdapUser> users = new ArrayList<LdapUser>(); final List<LdapUser> users = new ArrayList<>();
while (results.hasMoreElements()) { while (results.hasMoreElements()) {
final SearchResult result = results.nextElement(); final SearchResult result = results.nextElement();
users.add(createUser(result, domainId)); users.add(createUser(result, domainId));
@ -58,10 +58,8 @@ public class ADLdapUserManagerImpl extends OpenLdapUserManagerImpl implements Ld
} }
String generateADGroupSearchFilter(String groupName, Long domainId) { String generateADGroupSearchFilter(String groupName, Long domainId) {
final StringBuilder userObjectFilter = new StringBuilder();
userObjectFilter.append("(objectClass="); final StringBuilder userObjectFilter = getUserObjectFilter(domainId);
userObjectFilter.append(_ldapConfiguration.getUserObject(domainId));
userObjectFilter.append(")");
final StringBuilder memberOfFilter = new StringBuilder(); final StringBuilder memberOfFilter = new StringBuilder();
String groupCnName = _ldapConfiguration.getCommonNameAttribute() + "=" +groupName + "," + _ldapConfiguration.getBaseDn(domainId); String groupCnName = _ldapConfiguration.getCommonNameAttribute() + "=" +groupName + "," + _ldapConfiguration.getBaseDn(domainId);
@ -75,10 +73,18 @@ public class ADLdapUserManagerImpl extends OpenLdapUserManagerImpl implements Ld
result.append(memberOfFilter); result.append(memberOfFilter);
result.append(")"); result.append(")");
logger.debug("group search filter = " + result); logger.debug("group search filter = {}", result);
return result.toString(); return result.toString();
} }
StringBuilder getUserObjectFilter(Long domainId) {
final StringBuilder userObjectFilter = new StringBuilder();
userObjectFilter.append("(&(objectCategory=person)");
userObjectFilter.append(super.getUserObjectFilter(domainId));
userObjectFilter.append(")");
return userObjectFilter;
}
protected boolean isUserDisabled(SearchResult result) throws NamingException { protected boolean isUserDisabled(SearchResult result) throws NamingException {
boolean isDisabledUser = false; boolean isDisabledUser = false;
String userAccountControl = LdapUtils.getAttributeValue(result.getAttributes(), _ldapConfiguration.getUserAccountControlAttribute()); String userAccountControl = LdapUtils.getAttributeValue(result.getAttributes(), _ldapConfiguration.getUserAccountControlAttribute());

View File

@ -75,23 +75,15 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
} }
private String generateSearchFilter(final String username, Long domainId) { private String generateSearchFilter(final String username, Long domainId) {
final StringBuilder userObjectFilter = new StringBuilder(); final StringBuilder userObjectFilter = getUserObjectFilter(domainId);
userObjectFilter.append("(objectClass=");
userObjectFilter.append(_ldapConfiguration.getUserObject(domainId));
userObjectFilter.append(")");
final StringBuilder usernameFilter = new StringBuilder(); final StringBuilder usernameFilter = getUsernameFilter(username, domainId);
usernameFilter.append("(");
usernameFilter.append(_ldapConfiguration.getUsernameAttribute(domainId));
usernameFilter.append("=");
usernameFilter.append((username == null ? "*" : LdapUtils.escapeLDAPSearchFilter(username)));
usernameFilter.append(")");
String memberOfAttribute = getMemberOfAttribute(domainId); String memberOfAttribute = getMemberOfAttribute(domainId);
StringBuilder ldapGroupsFilter = new StringBuilder(); StringBuilder ldapGroupsFilter = new StringBuilder();
// this should get the trustmaps for this domain // this should get the trustmaps for this domain
List<String> ldapGroups = getMappedLdapGroups(domainId); List<String> ldapGroups = getMappedLdapGroups(domainId);
if (null != ldapGroups && ldapGroups.size() > 0) { if (!ldapGroups.isEmpty()) {
ldapGroupsFilter.append("(|"); ldapGroupsFilter.append("(|");
for (String ldapGroup : ldapGroups) { for (String ldapGroup : ldapGroups) {
ldapGroupsFilter.append(getMemberOfGroupString(ldapGroup, memberOfAttribute)); ldapGroupsFilter.append(getMemberOfGroupString(ldapGroup, memberOfAttribute));
@ -104,21 +96,35 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
if (null != pricipleGroup) { if (null != pricipleGroup) {
principleGroupFilter.append(getMemberOfGroupString(pricipleGroup, memberOfAttribute)); principleGroupFilter.append(getMemberOfGroupString(pricipleGroup, memberOfAttribute));
} }
final StringBuilder result = new StringBuilder();
result.append("(&");
result.append(userObjectFilter);
result.append(usernameFilter);
result.append(ldapGroupsFilter);
result.append(principleGroupFilter);
result.append(")");
String returnString = result.toString(); String returnString = "(&" +
if (logger.isTraceEnabled()) { userObjectFilter +
logger.trace("constructed ldap query: " + returnString); usernameFilter +
} ldapGroupsFilter +
principleGroupFilter +
")";
logger.trace("constructed ldap query: {}", returnString);
return returnString; return returnString;
} }
private StringBuilder getUsernameFilter(String username, Long domainId) {
final StringBuilder usernameFilter = new StringBuilder();
usernameFilter.append("(");
usernameFilter.append(_ldapConfiguration.getUsernameAttribute(domainId));
usernameFilter.append("=");
usernameFilter.append((username == null ? "*" : LdapUtils.escapeLDAPSearchFilter(username)));
usernameFilter.append(")");
return usernameFilter;
}
StringBuilder getUserObjectFilter(Long domainId) {
final StringBuilder userObjectFilter = new StringBuilder();
userObjectFilter.append("(objectClass=");
userObjectFilter.append(_ldapConfiguration.getUserObject(domainId));
userObjectFilter.append(")");
return userObjectFilter;
}
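For a concrete sense of what these extracted helpers produce, the sketch below assembles the same style of filter with hard-coded attribute and object-class names (illustrative defaults, not values read from LdapConfiguration) and without the LdapUtils escaping applied by the real code.

// Illustrative filter assembly mirroring getUserObjectFilter/getUsernameFilter above.
public class LdapFilterSketch {
    static String userObjectFilter(String userObject) {
        return "(objectClass=" + userObject + ")";
    }

    static String usernameFilter(String usernameAttribute, String username) {
        // The real code escapes the username via LdapUtils.escapeLDAPSearchFilter.
        return "(" + usernameAttribute + "=" + (username == null ? "*" : username) + ")";
    }

    public static void main(String[] args) {
        // Prints: (&(objectClass=inetOrgPerson)(uid=jdoe))
        System.out.println("(&" + userObjectFilter("inetOrgPerson") + usernameFilter("uid", "jdoe") + ")");
    }
}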
private List<String> getMappedLdapGroups(Long domainId) { private List<String> getMappedLdapGroups(Long domainId) {
List <String> ldapGroups = new ArrayList<>(); List <String> ldapGroups = new ArrayList<>();
// first get the trustmaps // first get the trustmaps
@ -134,37 +140,31 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
private String getMemberOfGroupString(String group, String memberOfAttribute) { private String getMemberOfGroupString(String group, String memberOfAttribute) {
final StringBuilder memberOfFilter = new StringBuilder(); final StringBuilder memberOfFilter = new StringBuilder();
if (null != group) { if (null != group) {
if(logger.isDebugEnabled()) { logger.debug("adding search filter for '{}', using '{}'", group, memberOfAttribute);
logger.debug("adding search filter for '" + group + memberOfFilter.append("(")
"', using '" + memberOfAttribute + "'"); .append(memberOfAttribute)
} .append("=")
memberOfFilter.append("(" + memberOfAttribute + "="); .append(group)
memberOfFilter.append(group); .append(")");
memberOfFilter.append(")");
} }
return memberOfFilter.toString(); return memberOfFilter.toString();
} }
private String generateGroupSearchFilter(final String groupName, Long domainId) { private String generateGroupSearchFilter(final String groupName, Long domainId) {
final StringBuilder groupObjectFilter = new StringBuilder(); String groupObjectFilter = "(objectClass=" +
groupObjectFilter.append("(objectClass="); _ldapConfiguration.getGroupObject(domainId) +
groupObjectFilter.append(_ldapConfiguration.getGroupObject(domainId)); ")";
groupObjectFilter.append(")");
final StringBuilder groupNameFilter = new StringBuilder(); String groupNameFilter = "(" +
groupNameFilter.append("("); _ldapConfiguration.getCommonNameAttribute() +
groupNameFilter.append(_ldapConfiguration.getCommonNameAttribute()); "=" +
groupNameFilter.append("="); (groupName == null ? "*" : LdapUtils.escapeLDAPSearchFilter(groupName)) +
groupNameFilter.append((groupName == null ? "*" : LdapUtils.escapeLDAPSearchFilter(groupName))); ")";
groupNameFilter.append(")");
final StringBuilder result = new StringBuilder(); return "(&" +
result.append("(&"); groupObjectFilter +
result.append(groupObjectFilter); groupNameFilter +
result.append(groupNameFilter); ")";
result.append(")");
return result.toString();
} }
@Override @Override
@ -186,17 +186,9 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
basedn = _ldapConfiguration.getBaseDn(domainId); basedn = _ldapConfiguration.getBaseDn(domainId);
} }
final StringBuilder userObjectFilter = new StringBuilder(); final StringBuilder userObjectFilter = getUserObjectFilter(domainId);
userObjectFilter.append("(objectClass=");
userObjectFilter.append(_ldapConfiguration.getUserObject(domainId));
userObjectFilter.append(")");
final StringBuilder usernameFilter = new StringBuilder(); final StringBuilder usernameFilter = getUsernameFilter(username, domainId);
usernameFilter.append("(");
usernameFilter.append(_ldapConfiguration.getUsernameAttribute(domainId));
usernameFilter.append("=");
usernameFilter.append((username == null ? "*" : LdapUtils.escapeLDAPSearchFilter(username)));
usernameFilter.append(")");
final StringBuilder memberOfFilter = new StringBuilder(); final StringBuilder memberOfFilter = new StringBuilder();
if ("GROUP".equals(type)) { if ("GROUP".equals(type)) {
@ -205,18 +197,17 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
memberOfFilter.append(")"); memberOfFilter.append(")");
} }
final StringBuilder searchQuery = new StringBuilder(); String searchQuery = "(&" +
searchQuery.append("(&"); userObjectFilter +
searchQuery.append(userObjectFilter); usernameFilter +
searchQuery.append(usernameFilter); memberOfFilter +
searchQuery.append(memberOfFilter); ")";
searchQuery.append(")");
return searchUser(basedn, searchQuery.toString(), context, domainId); return searchUser(basedn, searchQuery, context, domainId);
} }
protected String getMemberOfAttribute(final Long domainId) { protected String getMemberOfAttribute(final Long domainId) {
return _ldapConfiguration.getUserMemberOfAttribute(domainId); return LdapConfiguration.getUserMemberOfAttribute(domainId);
} }
@Override @Override
@ -243,7 +234,7 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
NamingEnumeration<SearchResult> result = context.search(_ldapConfiguration.getBaseDn(domainId), generateGroupSearchFilter(groupName, domainId), controls); NamingEnumeration<SearchResult> result = context.search(_ldapConfiguration.getBaseDn(domainId), generateGroupSearchFilter(groupName, domainId), controls);
final List<LdapUser> users = new ArrayList<LdapUser>(); final List<LdapUser> users = new ArrayList<>();
//Expecting only one result which has all the users //Expecting only one result which has all the users
if (result.hasMoreElements()) { if (result.hasMoreElements()) {
Attribute attribute = result.nextElement().getAttributes().get(attributeName); Attribute attribute = result.nextElement().getAttributes().get(attributeName);
@ -254,7 +245,7 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
try{ try{
users.add(getUserForDn(userdn, context, domainId)); users.add(getUserForDn(userdn, context, domainId));
} catch (NamingException e){ } catch (NamingException e){
logger.info("Userdn: " + userdn + " Not Found:: Exception message: " + e.getMessage()); logger.info("Userdn: {} Not Found:: Exception message: {}", userdn, e.getMessage());
} }
} }
} }
@ -286,17 +277,15 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
return false; return false;
} }
public LdapUser searchUser(final String basedn, final String searchString, final LdapContext context, Long domainId) throws NamingException, IOException { public LdapUser searchUser(final String basedn, final String searchString, final LdapContext context, Long domainId) throws NamingException {
final SearchControls searchControls = new SearchControls(); final SearchControls searchControls = new SearchControls();
searchControls.setSearchScope(_ldapConfiguration.getScope()); searchControls.setSearchScope(_ldapConfiguration.getScope());
searchControls.setReturningAttributes(_ldapConfiguration.getReturnAttributes(domainId)); searchControls.setReturningAttributes(_ldapConfiguration.getReturnAttributes(domainId));
NamingEnumeration<SearchResult> results = context.search(basedn, searchString, searchControls); NamingEnumeration<SearchResult> results = context.search(basedn, searchString, searchControls);
if(logger.isDebugEnabled()) { logger.debug("searching user(s) with filter: \"{}\"", searchString);
logger.debug("searching user(s) with filter: \"" + searchString + "\""); final List<LdapUser> users = new ArrayList<>();
}
final List<LdapUser> users = new ArrayList<LdapUser>();
while (results.hasMoreElements()) { while (results.hasMoreElements()) {
final SearchResult result = results.nextElement(); final SearchResult result = results.nextElement();
users.add(createUser(result, domainId)); users.add(createUser(result, domainId));
@ -324,7 +313,7 @@ public class OpenLdapUserManagerImpl implements LdapUserManager {
byte[] cookie = null; byte[] cookie = null;
int pageSize = _ldapConfiguration.getLdapPageSize(domainId); int pageSize = _ldapConfiguration.getLdapPageSize(domainId);
context.setRequestControls(new Control[]{new PagedResultsControl(pageSize, Control.NONCRITICAL)}); context.setRequestControls(new Control[]{new PagedResultsControl(pageSize, Control.NONCRITICAL)});
final List<LdapUser> users = new ArrayList<LdapUser>(); final List<LdapUser> users = new ArrayList<>();
NamingEnumeration<SearchResult> results; NamingEnumeration<SearchResult> results;
do { do {
results = context.search(basedn, generateSearchFilter(username, domainId), searchControls); results = context.search(basedn, generateSearchFilter(username, domainId), searchControls);

View File

@ -54,9 +54,8 @@ public class ADLdapUserManagerImplTest {
String [] groups = {"dev", "dev-hyd"}; String [] groups = {"dev", "dev-hyd"};
for (String group: groups) { for (String group: groups) {
String result = adLdapUserManager.generateADGroupSearchFilter(group, 1L); String result = adLdapUserManager.generateADGroupSearchFilter(group, 1L);
assertTrue(("(&(objectClass=user)(memberOf:1.2.840.113556.1.4.1941:=CN=" + group + ",DC=cloud,DC=citrix,DC=com))").equals(result)); assertTrue(("(&(&(objectCategory=person)(objectClass=user))(memberOf:1.2.840.113556.1.4.1941:=CN=" + group + ",DC=cloud,DC=citrix,DC=com))").equals(result));
} }
} }
@Test @Test
@ -69,7 +68,7 @@ public class ADLdapUserManagerImplTest {
String [] groups = {"dev", "dev-hyd"}; String [] groups = {"dev", "dev-hyd"};
for (String group: groups) { for (String group: groups) {
String result = adLdapUserManager.generateADGroupSearchFilter(group, 1L); String result = adLdapUserManager.generateADGroupSearchFilter(group, 1L);
assertTrue(("(&(objectClass=user)(memberOf=CN=" + group + ",DC=cloud,DC=citrix,DC=com))").equals(result)); assertTrue(("(&(&(objectCategory=person)(objectClass=user))(memberOf=CN=" + group + ",DC=cloud,DC=citrix,DC=com))").equals(result));
} }
} }

View File

@ -1038,15 +1038,19 @@
<exclude>dist/console-proxy/js/jquery.js</exclude> <exclude>dist/console-proxy/js/jquery.js</exclude>
<exclude>engine/schema/dist/**</exclude> <exclude>engine/schema/dist/**</exclude>
<exclude>plugins/hypervisors/hyperv/conf/agent.properties</exclude> <exclude>plugins/hypervisors/hyperv/conf/agent.properties</exclude>
<exclude>plugins/hypervisors/hyperv/conf/uefi.properties</exclude>
<exclude>plugins/hypervisors/hyperv/DotNet/ServerResource/**</exclude> <exclude>plugins/hypervisors/hyperv/DotNet/ServerResource/**</exclude>
<exclude>scripts/installer/windows/acs_license.rtf</exclude> <exclude>scripts/installer/windows/acs_license.rtf</exclude>
<exclude>scripts/vm/systemvm/id_rsa.cloud</exclude> <exclude>scripts/vm/systemvm/id_rsa.cloud</exclude>
<exclude>services/console-proxy/server/conf/agent.properties</exclude> <exclude>services/console-proxy/server/conf/agent.properties</exclude>
<exclude>services/console-proxy/server/conf/uefi.properties</exclude>
<exclude>services/console-proxy/server/conf/environment.properties</exclude> <exclude>services/console-proxy/server/conf/environment.properties</exclude>
<exclude>services/console-proxy/server/js/jquery.js</exclude> <exclude>services/console-proxy/server/js/jquery.js</exclude>
<exclude>services/secondary-storage/conf/agent.properties</exclude> <exclude>services/secondary-storage/conf/agent.properties</exclude>
<exclude>services/secondary-storage/conf/uefi.properties</exclude>
<exclude>services/secondary-storage/conf/environment.properties</exclude> <exclude>services/secondary-storage/conf/environment.properties</exclude>
<exclude>systemvm/agent/conf/agent.properties</exclude> <exclude>systemvm/agent/conf/agent.properties</exclude>
<exclude>systemvm/agent/conf/uefi.properties</exclude>
<exclude>systemvm/agent/conf/environment.properties</exclude> <exclude>systemvm/agent/conf/environment.properties</exclude>
<exclude>systemvm/agent/js/jquery.js</exclude> <exclude>systemvm/agent/js/jquery.js</exclude>
<exclude>systemvm/agent/js/jquery.flot.navigate.js</exclude> <exclude>systemvm/agent/js/jquery.flot.navigate.js</exclude>

View File

@ -4302,6 +4302,9 @@ public class ApiResponseHelper implements ResponseGenerator {
if (volume != null) { if (volume != null) {
builder.append("for ").append(volume.getName()).append(" (").append(volume.getUuid()).append(")"); builder.append("for ").append(volume.getName()).append(" (").append(volume.getUuid()).append(")");
} }
if (vmInstance != null) {
builder.append(" attached to VM ").append(vmInstance.getHostName()).append(" (").append(vmInstance.getUuid()).append(")");
}
if (diskOff != null) { if (diskOff != null) {
builder.append(" with disk offering ").append(diskOff.getName()).append(" (").append(diskOff.getUuid()).append(")"); builder.append(" with disk offering ").append(diskOff.getName()).append(" (").append(diskOff.getUuid()).append(")");
} }

View File

@ -39,6 +39,7 @@ import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Arrays;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
@ -251,6 +252,12 @@ public class ApiServer extends ManagerBase implements HttpRequestHandler, ApiSer
@Inject @Inject
private MessageBus messageBus; private MessageBus messageBus;
private static final Set<String> sensitiveFields = new HashSet<>(Arrays.asList(
"password", "secretkey", "apikey", "token",
"sessionkey", "accesskey", "signature",
"authorization", "credential", "secret"
));
private static final ConfigKey<Integer> IntegrationAPIPort = new ConfigKey<>(ConfigKey.CATEGORY_ADVANCED private static final ConfigKey<Integer> IntegrationAPIPort = new ConfigKey<>(ConfigKey.CATEGORY_ADVANCED
, Integer.class , Integer.class
, "integration.api.port" , "integration.api.port"
@ -624,10 +631,23 @@ public class ApiServer extends ManagerBase implements HttpRequestHandler, ApiSer
logger.error("invalid request, no command sent"); logger.error("invalid request, no command sent");
if (logger.isTraceEnabled()) { if (logger.isTraceEnabled()) {
logger.trace("dumping request parameters"); logger.trace("dumping request parameters");
for (final Object key : params.keySet()) {
final String keyStr = (String)key; for (final Object key : params.keySet()) {
final String[] value = (String[])params.get(key); final String keyStr = (String) key;
logger.trace(" key: " + keyStr + ", value: " + ((value == null) ? "'null'" : value[0])); final String[] value = (String[]) params.get(key);
String lowerKeyStr = keyStr.toLowerCase();
boolean isSensitive = sensitiveFields.stream()
.anyMatch(lowerKeyStr::contains);
String logValue;
if (isSensitive) {
logValue = "******"; // mask sensitive values
} else {
logValue = (value == null) ? "'null'" : value[0];
}
logger.trace(" key: " + keyStr + ", value: " + logValue);
} }
} }
throw new ServerApiException(ApiErrorCode.UNSUPPORTED_ACTION_ERROR, "Invalid request, no command sent"); throw new ServerApiException(ApiErrorCode.UNSUPPORTED_ACTION_ERROR, "Invalid request, no command sent");
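The masking rule added above can be exercised in isolation; this self-contained sketch repeats the same contains-based check so the behaviour is easy to verify outside the request handler.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// Standalone version of the sensitive-parameter masking used when dumping request parameters.
public class ParamMaskSketch {
    private static final Set<String> SENSITIVE_FIELDS = new HashSet<>(Arrays.asList(
            "password", "secretkey", "apikey", "token", "sessionkey",
            "accesskey", "signature", "authorization", "credential", "secret"));

    static String loggableValue(String key, String value) {
        String lowerKey = key.toLowerCase();
        boolean sensitive = SENSITIVE_FIELDS.stream().anyMatch(lowerKey::contains);
        return sensitive ? "******" : value;
    }

    public static void main(String[] args) {
        System.out.println(loggableValue("apiKey", "k-123"));     // prints ******
        System.out.println(loggableValue("command", "listVms"));  // prints listVms
    }
}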

View File

@ -21,6 +21,8 @@ import com.cloud.agent.api.to.DataObjectType;
import com.cloud.agent.api.to.NicTO; import com.cloud.agent.api.to.NicTO;
import com.cloud.agent.api.to.VirtualMachineTO; import com.cloud.agent.api.to.VirtualMachineTO;
import com.cloud.configuration.ConfigurationManagerImpl; import com.cloud.configuration.ConfigurationManagerImpl;
import com.cloud.event.EventTypes;
import com.cloud.event.UsageEventUtils;
import com.cloud.host.HostVO; import com.cloud.host.HostVO;
import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao; import com.cloud.hypervisor.dao.HypervisorCapabilitiesDao;
@ -372,6 +374,8 @@ public class KVMGuru extends HypervisorGuruBase implements HypervisorGuru {
_volumeDao.update(volume.getId(), volume); _volumeDao.update(volume.getId(), volume);
_volumeDao.attachVolume(volume.getId(), vm.getId(), getNextAvailableDeviceId(vmVolumes)); _volumeDao.attachVolume(volume.getId(), vm.getId(), getNextAvailableDeviceId(vmVolumes));
} }
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_ATTACH, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), vm.getId(), volume.isDisplay());
} }
} catch (Exception e) { } catch (Exception e) {
throw new RuntimeException("Could not restore VM " + vm.getName() + " due to : " + e.getMessage()); throw new RuntimeException("Could not restore VM " + vm.getName() + " due to : " + e.getMessage());
@ -389,6 +393,8 @@ public class KVMGuru extends HypervisorGuruBase implements HypervisorGuru {
_volumeDao.attachVolume(restoredVolume.getId(), vm.getId(), getNextAvailableDeviceId(vmVolumes)); _volumeDao.attachVolume(restoredVolume.getId(), vm.getId(), getNextAvailableDeviceId(vmVolumes));
restoredVolume.setState(Volume.State.Ready); restoredVolume.setState(Volume.State.Ready);
_volumeDao.update(restoredVolume.getId(), restoredVolume); _volumeDao.update(restoredVolume.getId(), restoredVolume);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_ATTACH, restoredVolume.getAccountId(), restoredVolume.getDataCenterId(), restoredVolume.getId(), restoredVolume.getName(),
restoredVolume.getDiskOfferingId(), restoredVolume.getTemplateId(), restoredVolume.getSize(), Volume.class.getName(), restoredVolume.getUuid(), vm.getId(), restoredVolume.isDisplay());
return true; return true;
} catch (Exception e) { } catch (Exception e) {
restoredVolume.setDisplay(false); restoredVolume.setDisplay(false);

View File

@ -823,6 +823,7 @@ import com.cloud.user.dao.AccountDao;
import com.cloud.user.dao.SSHKeyPairDao; import com.cloud.user.dao.SSHKeyPairDao;
import com.cloud.user.dao.UserDao; import com.cloud.user.dao.UserDao;
import com.cloud.user.dao.UserDataDao; import com.cloud.user.dao.UserDataDao;
import com.cloud.utils.EnumUtils;
import com.cloud.utils.NumbersUtil; import com.cloud.utils.NumbersUtil;
import com.cloud.utils.Pair; import com.cloud.utils.Pair;
import com.cloud.utils.PasswordGenerator; import com.cloud.utils.PasswordGenerator;
@ -1410,7 +1411,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
if (vmInstanceDetailVO != null && if (vmInstanceDetailVO != null &&
(ApiConstants.BootMode.LEGACY.toString().equalsIgnoreCase(vmInstanceDetailVO.getValue()) || (ApiConstants.BootMode.LEGACY.toString().equalsIgnoreCase(vmInstanceDetailVO.getValue()) ||
ApiConstants.BootMode.SECURE.toString().equalsIgnoreCase(vmInstanceDetailVO.getValue()))) { ApiConstants.BootMode.SECURE.toString().equalsIgnoreCase(vmInstanceDetailVO.getValue()))) {
logger.info(" Live Migration of UEFI enabled VM : " + vm.getInstanceName() + " is not supported"); logger.debug("{} VM is UEFI enabled, Checking for other UEFI enabled hosts as it can be live migrated to UEFI enabled host only.", vm.getInstanceName());
if (CollectionUtils.isEmpty(filteredHosts)) { if (CollectionUtils.isEmpty(filteredHosts)) {
filteredHosts = new ArrayList<>(allHosts); filteredHosts = new ArrayList<>(allHosts);
} }
@ -1420,6 +1421,9 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
return new Pair<>(false, null); return new Pair<>(false, null);
} }
filteredHosts.removeIf(host -> !uefiEnabledHosts.contains(host.getId())); filteredHosts.removeIf(host -> !uefiEnabledHosts.contains(host.getId()));
if (filteredHosts.isEmpty()) {
logger.warn("No UEFI enabled hosts are available for the live migration of VM {}", vm.getInstanceName());
}
return new Pair<>(!filteredHosts.isEmpty(), filteredHosts); return new Pair<>(!filteredHosts.isEmpty(), filteredHosts);
} }
return new Pair<>(true, filteredHosts); return new Pair<>(true, filteredHosts);
@ -2416,6 +2420,22 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
return new Pair<>(result.first(), result.second()); return new Pair<>(result.first(), result.second());
} }
protected List<IpAddress.State> getStatesForIpAddressSearch(final ListPublicIpAddressesCmd cmd) {
final String statesStr = cmd.getState();
final List<IpAddress.State> states = new ArrayList<>();
if (StringUtils.isBlank(statesStr)) {
return states;
}
for (String s : StringUtils.split(statesStr, ",")) {
IpAddress.State state = EnumUtils.getEnumIgnoreCase(IpAddress.State.class, s.trim());
if (state == null) {
throw new InvalidParameterValueException("Invalid state: " + s);
}
states.add(state);
}
return states;
}
@Override @Override
public Pair<List<? extends IpAddress>, Integer> searchForIPAddresses(final ListPublicIpAddressesCmd cmd) { public Pair<List<? extends IpAddress>, Integer> searchForIPAddresses(final ListPublicIpAddressesCmd cmd) {
final Long associatedNetworkId = cmd.getAssociatedNetworkId(); final Long associatedNetworkId = cmd.getAssociatedNetworkId();
@ -2426,20 +2446,20 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
final Long networkId = cmd.getNetworkId(); final Long networkId = cmd.getNetworkId();
final Long vpcId = cmd.getVpcId(); final Long vpcId = cmd.getVpcId();
final String state = cmd.getState(); final List<IpAddress.State> states = getStatesForIpAddressSearch(cmd);
Boolean isAllocated = cmd.isAllocatedOnly(); Boolean isAllocated = cmd.isAllocatedOnly();
if (isAllocated == null) { if (isAllocated == null) {
if (state != null && (state.equalsIgnoreCase(IpAddress.State.Free.name()) || state.equalsIgnoreCase(IpAddress.State.Reserved.name()))) { if (states.contains(IpAddress.State.Free) || states.contains(IpAddress.State.Reserved)) {
isAllocated = Boolean.FALSE; isAllocated = Boolean.FALSE;
} else { } else {
isAllocated = Boolean.TRUE; // default isAllocated = Boolean.TRUE; // default
} }
} else { } else {
if (state != null && (state.equalsIgnoreCase(IpAddress.State.Free.name()) || state.equalsIgnoreCase(IpAddress.State.Reserved.name()))) { if (states.contains(IpAddress.State.Free) || states.contains(IpAddress.State.Reserved)) {
if (isAllocated) { if (isAllocated) {
throw new InvalidParameterValueException("Conflict: allocatedonly is true but state is Free"); throw new InvalidParameterValueException("Conflict: allocatedonly is true but state is Free");
} }
} else if (state != null && state.equalsIgnoreCase(IpAddress.State.Allocated.name())) { } else if (states.contains(IpAddress.State.Allocated)) {
isAllocated = Boolean.TRUE; isAllocated = Boolean.TRUE;
} }
} }
@ -2518,10 +2538,8 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
Boolean isRecursive = cmd.isRecursive(); Boolean isRecursive = cmd.isRecursive();
final List<Long> permittedAccounts = new ArrayList<>(); final List<Long> permittedAccounts = new ArrayList<>();
ListProjectResourcesCriteria listProjectResourcesCriteria = null; ListProjectResourcesCriteria listProjectResourcesCriteria = null;
boolean isAllocatedOrReserved = false; boolean isAllocatedOrReserved = isAllocated ||
if (isAllocated || IpAddress.State.Reserved.name().equalsIgnoreCase(state)) { (states.size() == 1 && IpAddress.State.Reserved.equals(states.get(0)));
isAllocatedOrReserved = true;
}
if (isAllocatedOrReserved || (vlanType == VlanType.VirtualNetwork && (caller.getType() != Account.Type.ADMIN || cmd.getDomainId() != null))) { if (isAllocatedOrReserved || (vlanType == VlanType.VirtualNetwork && (caller.getType() != Account.Type.ADMIN || cmd.getDomainId() != null))) {
final Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<>(cmd.getDomainId(), cmd.isRecursive(), final Ternary<Long, Boolean, ListProjectResourcesCriteria> domainIdRecursiveListProject = new Ternary<>(cmd.getDomainId(), cmd.isRecursive(),
null); null);
@ -2535,7 +2553,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
buildParameters(sb, cmd, vlanType == VlanType.VirtualNetwork ? true : isAllocated); buildParameters(sb, cmd, vlanType == VlanType.VirtualNetwork ? true : isAllocated);
SearchCriteria<IPAddressVO> sc = sb.create(); SearchCriteria<IPAddressVO> sc = sb.create();
setParameters(sc, cmd, vlanType, isAllocated); setParameters(sc, cmd, vlanType, isAllocated, states);
if (isAllocatedOrReserved || (vlanType == VlanType.VirtualNetwork && (caller.getType() != Account.Type.ADMIN || cmd.getDomainId() != null))) { if (isAllocatedOrReserved || (vlanType == VlanType.VirtualNetwork && (caller.getType() != Account.Type.ADMIN || cmd.getDomainId() != null))) {
_accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria); _accountMgr.buildACLSearchCriteria(sc, domainId, isRecursive, permittedAccounts, listProjectResourcesCriteria);
@ -2603,7 +2621,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
buildParameters(searchBuilder, cmd, false); buildParameters(searchBuilder, cmd, false);
SearchCriteria<IPAddressVO> searchCriteria = searchBuilder.create(); SearchCriteria<IPAddressVO> searchCriteria = searchBuilder.create();
setParameters(searchCriteria, cmd, vlanType, false); setParameters(searchCriteria, cmd, vlanType, false, states);
searchCriteria.setParameters("state", IpAddress.State.Free.name()); searchCriteria.setParameters("state", IpAddress.State.Free.name());
addrs.addAll(_publicIpAddressDao.search(searchCriteria, searchFilter)); // Free IPs on shared network addrs.addAll(_publicIpAddressDao.search(searchCriteria, searchFilter)); // Free IPs on shared network
} }
@ -2616,7 +2634,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
sb2.and("quarantinedPublicIpsIdsNIN", sb2.entity().getId(), SearchCriteria.Op.NIN); sb2.and("quarantinedPublicIpsIdsNIN", sb2.entity().getId(), SearchCriteria.Op.NIN);
SearchCriteria<IPAddressVO> sc2 = sb2.create(); SearchCriteria<IPAddressVO> sc2 = sb2.create();
setParameters(sc2, cmd, vlanType, isAllocated); setParameters(sc2, cmd, vlanType, isAllocated, states);
sc2.setParameters("ids", freeAddrIds.toArray()); sc2.setParameters("ids", freeAddrIds.toArray());
_publicIpAddressDao.buildQuarantineSearchCriteria(sc2); _publicIpAddressDao.buildQuarantineSearchCriteria(sc2);
addrs.addAll(_publicIpAddressDao.search(sc2, searchFilter)); // Allocated + Free addrs.addAll(_publicIpAddressDao.search(sc2, searchFilter)); // Allocated + Free
@ -2646,7 +2664,7 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
sb.and("isSourceNat", sb.entity().isSourceNat(), SearchCriteria.Op.EQ); sb.and("isSourceNat", sb.entity().isSourceNat(), SearchCriteria.Op.EQ);
sb.and("isStaticNat", sb.entity().isOneToOneNat(), SearchCriteria.Op.EQ); sb.and("isStaticNat", sb.entity().isOneToOneNat(), SearchCriteria.Op.EQ);
sb.and("vpcId", sb.entity().getVpcId(), SearchCriteria.Op.EQ); sb.and("vpcId", sb.entity().getVpcId(), SearchCriteria.Op.EQ);
sb.and("state", sb.entity().getState(), SearchCriteria.Op.EQ); sb.and("state", sb.entity().getState(), SearchCriteria.Op.IN);
sb.and("display", sb.entity().isDisplay(), SearchCriteria.Op.EQ); sb.and("display", sb.entity().isDisplay(), SearchCriteria.Op.EQ);
sb.and(FOR_SYSTEMVMS, sb.entity().isForSystemVms(), SearchCriteria.Op.EQ); sb.and(FOR_SYSTEMVMS, sb.entity().isForSystemVms(), SearchCriteria.Op.EQ);
@ -2689,7 +2707,8 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
} }
} }
protected void setParameters(SearchCriteria<IPAddressVO> sc, final ListPublicIpAddressesCmd cmd, VlanType vlanType, Boolean isAllocated) { protected void setParameters(SearchCriteria<IPAddressVO> sc, final ListPublicIpAddressesCmd cmd, VlanType vlanType,
Boolean isAllocated, List<IpAddress.State> states) {
final Object keyword = cmd.getKeyword(); final Object keyword = cmd.getKeyword();
final Long physicalNetworkId = cmd.getPhysicalNetworkId(); final Long physicalNetworkId = cmd.getPhysicalNetworkId();
final Long sourceNetworkId = cmd.getNetworkId(); final Long sourceNetworkId = cmd.getNetworkId();
@ -2700,7 +2719,6 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
final Boolean sourceNat = cmd.isSourceNat(); final Boolean sourceNat = cmd.isSourceNat();
final Boolean staticNat = cmd.isStaticNat(); final Boolean staticNat = cmd.isStaticNat();
final Boolean forDisplay = cmd.getDisplay(); final Boolean forDisplay = cmd.getDisplay();
final String state = cmd.getState();
final Boolean forSystemVms = cmd.getForSystemVMs(); final Boolean forSystemVms = cmd.getForSystemVMs();
final boolean forProvider = cmd.isForProvider(); final boolean forProvider = cmd.isForProvider();
final Map<String, String> tags = cmd.getTags(); final Map<String, String> tags = cmd.getTags();
@ -2757,13 +2775,14 @@ public class ManagementServerImpl extends ManagerBase implements ManagementServe
sc.setParameters("display", forDisplay); sc.setParameters("display", forDisplay);
} }
if (state != null) { if (CollectionUtils.isNotEmpty(states)) {
sc.setParameters("state", state); sc.setParameters("state", states.toArray());
} else if (isAllocated != null && isAllocated) { } else if (isAllocated != null && isAllocated) {
sc.setParameters("state", IpAddress.State.Allocated); sc.setParameters("state", IpAddress.State.Allocated);
} }
if (IpAddressManagerImpl.getSystemvmpublicipreservationmodestrictness().value() && IpAddress.State.Free.name().equalsIgnoreCase(state)) { if (IpAddressManagerImpl.getSystemvmpublicipreservationmodestrictness().value() &&
states.contains(IpAddress.State.Free)) {
sc.setParameters(FOR_SYSTEMVMS, false); sc.setParameters(FOR_SYSTEMVMS, false);
} else { } else {
sc.setParameters(FOR_SYSTEMVMS, forSystemVms); sc.setParameters(FOR_SYSTEMVMS, forSystemVms);
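With this change the state parameter of listPublicIpAddresses accepts a comma-separated list: entries are trimmed, matched case-insensitively against IpAddress.State, unknown values are rejected, and the search criteria switch from EQ to IN. A self-contained sketch of that parsing rule is shown below; the enum and class names are stand-ins for IpAddress.State and are illustrative, not taken from the patch.

import java.util.ArrayList;
import java.util.List;

public class IpStateFilterSketch {
    enum State { Allocated, Free, Reserved, Releasing }

    // Blank input yields an empty list; each token is trimmed and matched
    // case-insensitively; an unknown token is rejected outright.
    static List<State> parseStates(String statesStr) {
        List<State> states = new ArrayList<>();
        if (statesStr == null || statesStr.trim().isEmpty()) {
            return states;
        }
        for (String s : statesStr.split(",")) {
            String trimmed = s.trim();
            State match = null;
            for (State candidate : State.values()) {
                if (candidate.name().equalsIgnoreCase(trimmed)) {
                    match = candidate;
                    break;
                }
            }
            if (match == null) {
                throw new IllegalArgumentException("Invalid state: " + trimmed);
            }
            states.add(match);
        }
        return states;
    }

    public static void main(String[] args) {
        System.out.println(parseStates("Allocated ,free"));  // [Allocated, Free]
        System.out.println(parseStates(" "));                // []
    }
}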

View File

@ -1004,7 +1004,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
if (snapshotId == null && displayVolume) { if (snapshotId == null && displayVolume) {
// for volume created from snapshot, create usage event after volume creation // for volume created from snapshot, create usage event after volume creation
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId, null, size, UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), diskOfferingId, null, size,
Volume.class.getName(), volume.getUuid(), displayVolume); Volume.class.getName(), volume.getUuid(), volume.getInstanceId(), displayVolume);
} }
if (volume != null && details != null) { if (volume != null && details != null) {
@ -1106,7 +1106,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
createdVolume = _volumeMgr.createVolumeFromSnapshot(volume, snapshot, vm); createdVolume = _volumeMgr.createVolumeFromSnapshot(volume, snapshot, vm);
VolumeVO volumeVo = _volsDao.findById(createdVolume.getId()); VolumeVO volumeVo = _volsDao.findById(createdVolume.getId());
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(), createdVolume.getName(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, createdVolume.getAccountId(), createdVolume.getDataCenterId(), createdVolume.getId(), createdVolume.getName(),
createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid(), volumeVo.isDisplayVolume()); createdVolume.getDiskOfferingId(), null, createdVolume.getSize(), Volume.class.getName(), createdVolume.getUuid(), volume.getInstanceId(), volumeVo.isDisplayVolume());
return volumeVo; return volumeVo;
} }
@ -1578,6 +1578,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
} }
} }
volume = _volsDao.findById(volumeId);
if (newDiskOfferingId != null) { if (newDiskOfferingId != null) {
volume.setDiskOfferingId(newDiskOfferingId); volume.setDiskOfferingId(newDiskOfferingId);
_volumeMgr.saveVolumeDetails(newDiskOfferingId, volume.getId()); _volumeMgr.saveVolumeDetails(newDiskOfferingId, volume.getId());
@ -1592,7 +1593,6 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
} }
// Update size if volume has same size as before, else it is already updated // Update size if volume has same size as before, else it is already updated
volume = _volsDao.findById(volumeId);
if (currentSize == volume.getSize() && currentSize != newSize) { if (currentSize == volume.getSize() && currentSize != newSize) {
volume.setSize(newSize); volume.setSize(newSize);
} else if (volume.getSize() != newSize) { } else if (volume.getSize() != newSize) {
@ -1903,7 +1903,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
} }
UsageEventUtils UsageEventUtils
.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), offeringId, .publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), offeringId,
volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), volume.isDisplay()); volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), volume.getInstanceId(), volume.isDisplay());
logger.debug("Volume [{}] has been successfully recovered, thus a new usage event {} has been published.", volume, EventTypes.EVENT_VOLUME_CREATE); logger.debug("Volume [{}] has been successfully recovered, thus a new usage event {} has been published.", volume, EventTypes.EVENT_VOLUME_CREATE);
} }
@ -2998,7 +2998,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
if (displayVolume) { if (displayVolume) {
// flag turned 1 equivalent to freshly created volume // flag turned 1 equivalent to freshly created volume
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), volume.getDiskOfferingId(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), volume.getDiskOfferingId(),
volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid()); volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), volume.getInstanceId(), displayVolume);
} else { } else {
// flag turned 0 equivalent to deleting a volume // flag turned 0 equivalent to deleting a volume
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), Volume.class.getName(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), Volume.class.getName(),
@ -3259,6 +3259,8 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
handleTargetsForVMware(hostId, volumePool.getHostAddress(), volumePool.getPort(), volume.get_iScsiName()); handleTargetsForVMware(hostId, volumePool.getHostAddress(), volumePool.getPort(), volume.get_iScsiName());
} }
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DETACH, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), null, volume.getSize(), Volume.class.getName(), volume.getUuid(), null, volume.isDisplay());
return _volsDao.findById(volumeId); return _volsDao.findById(volumeId);
} else { } else {
@ -4339,7 +4341,7 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
diskOfferingVO); diskOfferingVO);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(),
volume.getUuid(), volume.isDisplayVolume()); volume.getUuid(), volume.getInstanceId(), volume.isDisplayVolume());
volService.moveVolumeOnSecondaryStorageToAnotherAccount(volume, oldAccount, newAccount); volService.moveVolumeOnSecondaryStorageToAnotherAccount(volume, oldAccount, newAccount);
} }
@ -4863,6 +4865,9 @@ public class VolumeApiServiceImpl extends ManagerBase implements VolumeApiServic
if (attached) { if (attached) {
ev = Volume.Event.OperationSucceeded; ev = Volume.Event.OperationSucceeded;
logger.debug("Volume: {} successfully attached to VM: {}", volInfo.getVolume(), volInfo.getAttachedVM()); logger.debug("Volume: {} successfully attached to VM: {}", volInfo.getVolume(), volInfo.getAttachedVM());
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_ATTACH, volumeToAttach.getAccountId(), volumeToAttach.getDataCenterId(), volumeToAttach.getId(), volumeToAttach.getName(),
volumeToAttach.getDiskOfferingId(), volumeToAttach.getTemplateId(), volumeToAttach.getSize(), Volume.class.getName(), volumeToAttach.getUuid(), vm.getId(), volumeToAttach.isDisplay());
provideVMInfo(dataStore, vm.getId(), volInfo.getId()); provideVMInfo(dataStore, vm.getId(), volInfo.getId());
} else { } else {
logger.debug("Volume: {} failed to attach to VM: {}", volInfo.getVolume(), volInfo.getAttachedVM()); logger.debug("Volume: {} failed to attach to VM: {}", volInfo.getVolume(), volInfo.getAttachedVM());

View File

@ -81,7 +81,7 @@ public class VolumeStateListener implements StateListener<State, Event, Volume>
// For the Resize Volume Event, this publishes an event with an incorrect disk offering ID, so do nothing for now // For the Resize Volume Event, this publishes an event with an incorrect disk offering ID, so do nothing for now
} else { } else {
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), vol.getDiskOfferingId(), null, vol.getSize(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), vol.getDiskOfferingId(), null, vol.getSize(),
Volume.class.getName(), vol.getUuid(), vol.isDisplayVolume()); Volume.class.getName(), vol.getUuid(), instanceId, vol.isDisplayVolume());
} }
} else if (transition.getToState() == State.Destroy && vol.getVolumeType() != Volume.Type.ROOT) { //Do not Publish Usage Event for ROOT Disk as it would have been published already while destroying a VM } else if (transition.getToState() == State.Destroy && vol.getVolumeType() != Volume.Type.ROOT) { //Do not Publish Usage Event for ROOT Disk as it would have been published already while destroying a VM
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_DELETE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(),

View File

@ -2408,6 +2408,8 @@ public class UserVmManagerImpl extends ManagerBase implements UserVmManager, Vir
if (Volume.State.Destroy.equals(volume.getState())) { if (Volume.State.Destroy.equals(volume.getState())) {
_volumeService.recoverVolume(volume.getId()); _volumeService.recoverVolume(volume.getId());
_volsDao.attachVolume(volume.getId(), vmId, ROOT_DEVICE_ID); _volsDao.attachVolume(volume.getId(), vmId, ROOT_DEVICE_ID);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_ATTACH, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), vmId, volume.isDisplay());
} else { } else {
_volumeService.publishVolumeCreationUsageEvent(volume); _volumeService.publishVolumeCreationUsageEvent(volume);
} }
@ -8156,7 +8158,7 @@ public class UserVmManagerImpl extends ManagerBase implements UserVmManager, Vir
logger.trace("Generating a create volume event for volume [{}].", volume); logger.trace("Generating a create volume event for volume [{}].", volume);
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(), UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volume.getAccountId(), volume.getDataCenterId(), volume.getId(), volume.getName(),
volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), volume.isDisplayVolume()); volume.getDiskOfferingId(), volume.getTemplateId(), volume.getSize(), Volume.class.getName(), volume.getUuid(), volume.getInstanceId(), volume.isDisplayVolume());
} }
} }
@ -8959,6 +8961,8 @@ public class UserVmManagerImpl extends ManagerBase implements UserVmManager, Vir
handleManagedStorage(vm, root); handleManagedStorage(vm, root);
_volsDao.attachVolume(newVol.getId(), vmId, newVol.getDeviceId()); _volsDao.attachVolume(newVol.getId(), vmId, newVol.getDeviceId());
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_ATTACH, newVol.getAccountId(), newVol.getDataCenterId(), newVol.getId(), newVol.getName(),
newVol.getDiskOfferingId(), newVol.getTemplateId(), newVol.getSize(), Volume.class.getName(), newVol.getUuid(), vmId, newVol.isDisplay());
// Detach, destroy and create the usage event for the old root volume. // Detach, destroy and create the usage event for the old root volume.
_volsDao.detachVolume(root.getId()); _volsDao.detachVolume(root.getId());

View File

@ -26,6 +26,7 @@ import static org.mockito.Mockito.when;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.List; import java.util.List;
import org.apache.cloudstack.annotation.dao.AnnotationDao; import org.apache.cloudstack.annotation.dao.AnnotationDao;
@ -258,14 +259,14 @@ public class ManagementServerImplTest {
Mockito.when(cmd.getId()).thenReturn(null); Mockito.when(cmd.getId()).thenReturn(null);
Mockito.when(cmd.isSourceNat()).thenReturn(null); Mockito.when(cmd.isSourceNat()).thenReturn(null);
Mockito.when(cmd.isStaticNat()).thenReturn(null); Mockito.when(cmd.isStaticNat()).thenReturn(null);
Mockito.when(cmd.getState()).thenReturn(IpAddress.State.Free.name());
Mockito.when(cmd.getTags()).thenReturn(null); Mockito.when(cmd.getTags()).thenReturn(null);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.FALSE); List<IpAddress.State> states = Collections.singletonList(IpAddress.State.Free);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.FALSE, states);
Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork); Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork);
Mockito.verify(sc, Mockito.times(1)).setParameters("display", false); Mockito.verify(sc, Mockito.times(1)).setParameters("display", false);
Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L); Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L);
Mockito.verify(sc, Mockito.times(1)).setParameters("state", "Free"); Mockito.verify(sc, Mockito.times(1)).setParameters("state", states.toArray());
Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false); Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false);
} }
@ -281,14 +282,14 @@ public class ManagementServerImplTest {
Mockito.when(cmd.getId()).thenReturn(null); Mockito.when(cmd.getId()).thenReturn(null);
Mockito.when(cmd.isSourceNat()).thenReturn(null); Mockito.when(cmd.isSourceNat()).thenReturn(null);
Mockito.when(cmd.isStaticNat()).thenReturn(null); Mockito.when(cmd.isStaticNat()).thenReturn(null);
Mockito.when(cmd.getState()).thenReturn(IpAddress.State.Free.name());
Mockito.when(cmd.getTags()).thenReturn(null); Mockito.when(cmd.getTags()).thenReturn(null);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.FALSE); List<IpAddress.State> states = Collections.singletonList(IpAddress.State.Free);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.FALSE, states);
Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork); Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork);
Mockito.verify(sc, Mockito.times(1)).setParameters("display", false); Mockito.verify(sc, Mockito.times(1)).setParameters("display", false);
Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L); Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L);
Mockito.verify(sc, Mockito.times(1)).setParameters("state", "Free"); Mockito.verify(sc, Mockito.times(1)).setParameters("state", states.toArray());
Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false); Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false);
} }
@ -304,13 +305,13 @@ public class ManagementServerImplTest {
Mockito.when(cmd.getId()).thenReturn(null); Mockito.when(cmd.getId()).thenReturn(null);
Mockito.when(cmd.isSourceNat()).thenReturn(null); Mockito.when(cmd.isSourceNat()).thenReturn(null);
Mockito.when(cmd.isStaticNat()).thenReturn(null); Mockito.when(cmd.isStaticNat()).thenReturn(null);
Mockito.when(cmd.getState()).thenReturn(null);
Mockito.when(cmd.getTags()).thenReturn(null); Mockito.when(cmd.getTags()).thenReturn(null);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.TRUE); spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.TRUE, Collections.emptyList());
Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork); Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork);
Mockito.verify(sc, Mockito.times(1)).setParameters("display", false); Mockito.verify(sc, Mockito.times(1)).setParameters("display", false);
Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L); Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L);
Mockito.verify(sc, Mockito.times(1)).setParameters("state", IpAddress.State.Allocated);
Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false); Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false);
} }
@ -326,13 +327,13 @@ public class ManagementServerImplTest {
Mockito.when(cmd.getId()).thenReturn(null); Mockito.when(cmd.getId()).thenReturn(null);
Mockito.when(cmd.isSourceNat()).thenReturn(null); Mockito.when(cmd.isSourceNat()).thenReturn(null);
Mockito.when(cmd.isStaticNat()).thenReturn(null); Mockito.when(cmd.isStaticNat()).thenReturn(null);
Mockito.when(cmd.getState()).thenReturn(null);
Mockito.when(cmd.getTags()).thenReturn(null); Mockito.when(cmd.getTags()).thenReturn(null);
spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.TRUE); spy.setParameters(sc, cmd, VlanType.VirtualNetwork, Boolean.TRUE, Collections.emptyList());
Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork); Mockito.verify(sc, Mockito.times(1)).setJoinParameters("vlanSearch", "vlanType", VlanType.VirtualNetwork);
Mockito.verify(sc, Mockito.times(1)).setParameters("display", false); Mockito.verify(sc, Mockito.times(1)).setParameters("display", false);
Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L); Mockito.verify(sc, Mockito.times(1)).setParameters("sourceNetworkId", 10L);
Mockito.verify(sc, Mockito.times(1)).setParameters("state", IpAddress.State.Allocated);
Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false); Mockito.verify(sc, Mockito.times(1)).setParameters("forsystemvms", false);
} }
@ -1033,4 +1034,49 @@ public class ManagementServerImplTest {
Assert.assertNotNull(spy.getExternalVmConsole(virtualMachine, host)); Assert.assertNotNull(spy.getExternalVmConsole(virtualMachine, host));
Mockito.verify(extensionManager).getInstanceConsole(virtualMachine, host); Mockito.verify(extensionManager).getInstanceConsole(virtualMachine, host);
} }
@Test
public void getStatesForIpAddressSearchReturnsValidStates() {
ListPublicIpAddressesCmd cmd = Mockito.mock(ListPublicIpAddressesCmd.class);
Mockito.when(cmd.getState()).thenReturn("Allocated ,free");
List<IpAddress.State> result = spy.getStatesForIpAddressSearch(cmd);
Assert.assertEquals(2, result.size());
Assert.assertTrue(result.contains(IpAddress.State.Allocated));
Assert.assertTrue(result.contains(IpAddress.State.Free));
}
@Test
public void getStatesForIpAddressSearchReturnsEmptyListForNullState() {
ListPublicIpAddressesCmd cmd = Mockito.mock(ListPublicIpAddressesCmd.class);
Mockito.when(cmd.getState()).thenReturn(null);
List<IpAddress.State> result = spy.getStatesForIpAddressSearch(cmd);
Assert.assertTrue(result.isEmpty());
}
@Test
public void getStatesForIpAddressSearchReturnsEmptyListForBlankState() {
ListPublicIpAddressesCmd cmd = Mockito.mock(ListPublicIpAddressesCmd.class);
Mockito.when(cmd.getState()).thenReturn(" ");
List<IpAddress.State> result = spy.getStatesForIpAddressSearch(cmd);
Assert.assertTrue(result.isEmpty());
}
@Test(expected = InvalidParameterValueException.class)
public void getStatesForIpAddressSearchThrowsExceptionForInvalidState() {
ListPublicIpAddressesCmd cmd = Mockito.mock(ListPublicIpAddressesCmd.class);
Mockito.when(cmd.getState()).thenReturn("InvalidState");
spy.getStatesForIpAddressSearch(cmd);
}
@Test
public void getStatesForIpAddressSearchHandlesMixedValidAndInvalidStates() {
ListPublicIpAddressesCmd cmd = Mockito.mock(ListPublicIpAddressesCmd.class);
Mockito.when(cmd.getState()).thenReturn("Allocated,InvalidState");
try {
spy.getStatesForIpAddressSearch(cmd);
Assert.fail("Expected InvalidParameterValueException to be thrown");
} catch (InvalidParameterValueException e) {
Assert.assertEquals("Invalid state: InvalidState", e.getMessage());
}
}
} }

View File

@ -1545,7 +1545,7 @@ public class VolumeApiServiceImplTest {
volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock); volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock);
usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(), usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(),
null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.isDisplay())); null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.getInstanceId(), volumeVoMock.isDisplay()));
} }
} }
@ -1558,7 +1558,7 @@ public class VolumeApiServiceImplTest {
volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock); volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock);
usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(), usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(),
null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.isDisplay())); null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.getInstanceId(), volumeVoMock.isDisplay()));
} }
} }
@ -1573,7 +1573,7 @@ public class VolumeApiServiceImplTest {
volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock); volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock);
usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(), usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(),
null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.isDisplay())); null, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.getInstanceId(), volumeVoMock.isDisplay()));
} }
} }
@ -1589,7 +1589,7 @@ public class VolumeApiServiceImplTest {
volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock); volumeApiServiceImpl.publishVolumeCreationUsageEvent(volumeVoMock);
usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(), usageEventUtilsMocked.verify(() -> UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, volumeVoMock.getAccountId(), volumeVoMock.getDataCenterId(), volumeVoMock.getId(), volumeVoMock.getName(),
offeringMockId, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.isDisplay())); offeringMockId, volumeVoMock.getTemplateId(), volumeVoMock.getSize(), Volume.class.getName(), volumeVoMock.getUuid(), volumeVoMock.getInstanceId(), volumeVoMock.isDisplay()));
} }
} }

View File

@ -1120,10 +1120,12 @@ public class UserVmManagerImplTest {
public void recoverRootVolumeTestDestroyState() { public void recoverRootVolumeTestDestroyState() {
Mockito.doReturn(Volume.State.Destroy).when(volumeVOMock).getState(); Mockito.doReturn(Volume.State.Destroy).when(volumeVOMock).getState();
userVmManagerImpl.recoverRootVolume(volumeVOMock, vmId); try (MockedStatic<UsageEventUtils> ignored = Mockito.mockStatic(UsageEventUtils.class)) {
userVmManagerImpl.recoverRootVolume(volumeVOMock, vmId);
Mockito.verify(volumeApiService).recoverVolume(volumeVOMock.getId()); Mockito.verify(volumeApiService).recoverVolume(volumeVOMock.getId());
Mockito.verify(volumeDaoMock).attachVolume(volumeVOMock.getId(), vmId, UserVmManagerImpl.ROOT_DEVICE_ID); Mockito.verify(volumeDaoMock).attachVolume(volumeVOMock.getId(), vmId, UserVmManagerImpl.ROOT_DEVICE_ID);
}
} }
@Test(expected = InvalidParameterValueException.class) @Test(expected = InvalidParameterValueException.class)

View File

@ -199,6 +199,10 @@ for full help
self.info("No mysql root user specified, will not create Cloud DB schema\n", None) self.info("No mysql root user specified, will not create Cloud DB schema\n", None)
return return
if self.areCloudDatabasesCreated() and not self.options.schemaonly and not self.options.forcerecreate:
self.errorAndExit("Aborting script as the databases (cloud, cloud_usage) already exist.\n" \
"Please use the --force-recreate parameter if you want to recreate the databases and schemas, or use --schema-only if you only want to create the schemas only.")
replacements = ( replacements = (
("CREATE USER cloud identified by 'cloud';", ("CREATE USER cloud identified by 'cloud';",
"CREATE USER %s@`localhost` identified by '%s'; CREATE USER %s@`%%` identified by '%s';"%( "CREATE USER %s@`localhost` identified by '%s'; CREATE USER %s@`%%` identified by '%s';"%(
@ -239,10 +243,6 @@ for full help
("DROP USER 'cloud'@'%' ;", "DO NULL;") ("DROP USER 'cloud'@'%' ;", "DO NULL;")
) )
if self.areCloudDatabasesCreated() and not self.options.forcerecreate:
self.errorAndExit("Aborting script as the databases (cloud, cloud_usage) already exist.\n" \
"Please use the --force-recreate parameter if you want to recreate the schemas.")
scriptsToRun = ["create-database","create-schema", "create-database-premium","create-schema-premium"] scriptsToRun = ["create-database","create-schema", "create-database-premium","create-schema-premium"]
if self.options.schemaonly: if self.options.schemaonly:
scriptsToRun = ["create-schema", "create-schema-premium"] scriptsToRun = ["create-schema", "create-schema-premium"]
@ -617,11 +617,11 @@ for example:
self.parser.add_option("-d", "--deploy-as", action="store", type="string", dest="rootcreds", default="", self.parser.add_option("-d", "--deploy-as", action="store", type="string", dest="rootcreds", default="",
help="Colon-separated user name and password of a MySQL user with administrative privileges") help="Colon-separated user name and password of a MySQL user with administrative privileges")
self.parser.add_option("-s", "--schema-only", action="store_true", dest="schemaonly", default=False, self.parser.add_option("-s", "--schema-only", action="store_true", dest="schemaonly", default=False,
help="Creates the db schema without having to pass root credentials - " \ help="Creates the db schema only without having to pass root credentials - " \
"Please note: The databases (cloud, cloud_usage) and user (cloud) has to be configured " \ "Please note: The databases (cloud, cloud_usage) and user (cloud) has to be configured " \
"manually prior to running this script when using this flag.") "manually prior to running this script when using this flag.")
self.parser.add_option("--force-recreate", action="store_true", dest="forcerecreate", default=False, self.parser.add_option("--force-recreate", action="store_true", dest="forcerecreate", default=False,
help="Force recreation of the existing DB schemas. This option is disabled by default." \ help="Force recreation of the existing DB databases and schemas. This option is disabled by default." \
"Please note: The databases (cloud, cloud_usage) and its tables data will be lost and recreated.") "Please note: The databases (cloud, cloud_usage) and its tables data will be lost and recreated.")
self.parser.add_option("-a", "--auto", action="store", type="string", dest="serversetup", default="", self.parser.add_option("-a", "--auto", action="store", type="string", dest="serversetup", default="",

View File

@ -60,6 +60,7 @@
<include>log4j-cloud.xml</include> <include>log4j-cloud.xml</include>
<include>consoleproxy.properties</include> <include>consoleproxy.properties</include>
<include>agent.properties</include> <include>agent.properties</include>
<include>uefi.properties</include>
</includes> </includes>
</fileSet> </fileSet>
<fileSet> <fileSet>

View File

@ -3121,7 +3121,7 @@
"message.change.offering.confirm": "Please confirm that you wish to change the service offering of this virtual Instance.", "message.change.offering.confirm": "Please confirm that you wish to change the service offering of this virtual Instance.",
"message.change.offering.for.volume": "Successfully changed offering for the volume", "message.change.offering.for.volume": "Successfully changed offering for the volume",
"message.change.offering.for.volume.failed": "Change offering for the volume failed", "message.change.offering.for.volume.failed": "Change offering for the volume failed",
"message.change.offering.processing": "Changing offering for the volume...", "message.change.offering.for.volume.processing": "Changing offering for the volume...",
"message.change.password": "Please change your password.", "message.change.password": "Please change your password.",
"message.change.scope.failed": "Scope change failed", "message.change.scope.failed": "Scope change failed",
"message.change.scope.processing": "Scope change in progress", "message.change.scope.processing": "Scope change in progress",

View File

@ -71,9 +71,14 @@ export default {
if (this.$route.meta.name === 'iso') { if (this.$route.meta.name === 'iso') {
this.imageApi = 'listIsos' this.imageApi = 'listIsos'
} }
setTimeout(() => { this.fetchData()
this.fetchData() },
}, 100) watch: {
resource (newValue) {
if (newValue?.id) {
this.fetchData()
}
}
}, },
computed: { computed: {
allowed () { allowed () {
@ -82,23 +87,22 @@ export default {
} }
}, },
methods: { methods: {
arrayHasItems (array) {
return array !== null && array !== undefined && Array.isArray(array) && array.length > 0
},
fetchData () { fetchData () {
this.fetchResourceData() this.fetchResourceData()
}, },
fetchResourceData () { fetchResourceData () {
const params = {} if (!this.resource || !this.resource.id) {
params.id = this.resource.id return
params.templatefilter = 'executable' }
params.listall = true const params = {
params.page = this.page id: this.resource.id,
params.pagesize = this.pageSize templatefilter: 'executable',
listall: true
}
this.dataSource = [] this.dataSource = []
this.itemCount = 0 this.itemCount = 0
this.fetchLoading = true this.loading = true
this.zones = [] this.zones = []
getAPI(this.imageApi, params).then(json => { getAPI(this.imageApi, params).then(json => {
const imageResponse = json?.[this.imageApi.toLowerCase() + 'response']?.[this.$route.meta.name] || [] const imageResponse = json?.[this.imageApi.toLowerCase() + 'response']?.[this.$route.meta.name] || []
@ -108,8 +112,8 @@ export default {
})) }))
}).catch(error => { }).catch(error => {
this.$notifyError(error) this.$notifyError(error)
this.loading = false
}).finally(() => { }).finally(() => {
this.loading = false
if (this.zones.length !== 0) { if (this.zones.length !== 0) {
this.$emit('update-zones', this.zones) this.$emit('update-zones', this.zones)
} }
@ -122,7 +126,8 @@ export default {
} }
const zoneids = this.zones.map(z => z.id) const zoneids = this.zones.map(z => z.id)
this.loading = true this.loading = true
getAPI('listZones', { showicon: true, ids: zoneids.join(',') }).then(json => { const params = { showicon: true, ids: zoneids.join(',') }
getAPI('listZones', params).then(json => {
this.zones = json.listzonesresponse.zone || [] this.zones = json.listzonesresponse.zone || []
}).finally(() => { }).finally(() => {
this.loading = false this.loading = false

View File

@ -25,7 +25,8 @@
@search="handleSearch" /> @search="handleSearch" />
<ConfigurationTable <ConfigurationTable
:columns="columns" :columns="columns"
:config="items" /> :config="items"
:resource="resource" />
</a-col> </a-col>
</div> </div>
</template> </template>

View File

@ -43,6 +43,7 @@
- defaultOption (Object, optional): Preselected object to include initially - defaultOption (Object, optional): Preselected object to include initially
- showIcon (Boolean, optional): Whether to show icon for the options. Default is true - showIcon (Boolean, optional): Whether to show icon for the options. Default is true
- defaultIcon (String, optional): Icon to be shown when there is no resource icon for the option. Default is 'cloud-outlined' - defaultIcon (String, optional): Icon to be shown when there is no resource icon for the option. Default is 'cloud-outlined'
- autoSelectFirstOption (Boolean, optional): Whether to automatically select the first option when options are loaded. Default is false
Events: Events:
- @change-option-value (Function): Emits the selected option value(s) when value(s) changes. Do not use @change as it will give warnings and may not work - @change-option-value (Function): Emits the selected option value(s) when value(s) changes. Do not use @change as it will give warnings and may not work
@ -81,7 +82,7 @@
<resource-icon v-if="option.icon && option.icon.base64image" :image="option.icon.base64image" size="1x" style="margin-right: 5px"/> <resource-icon v-if="option.icon && option.icon.base64image" :image="option.icon.base64image" size="1x" style="margin-right: 5px"/>
<render-icon v-else :icon="defaultIcon" style="margin-right: 5px" /> <render-icon v-else :icon="defaultIcon" style="margin-right: 5px" />
</span> </span>
<span>{{ option[optionLabelKey] }}</span> <span>{{ optionLabelFn ? optionLabelFn(option) : option[optionLabelKey] }}</span>
</span> </span>
</a-select-option> </a-select-option>
</a-select> </a-select>
@ -120,6 +121,10 @@ export default {
type: String, type: String,
default: 'name' default: 'name'
}, },
optionLabelFn: {
type: Function,
default: null
},
defaultOption: { defaultOption: {
type: Object, type: Object,
default: null default: null
@ -135,6 +140,10 @@ export default {
pageSize: { pageSize: {
type: Number, type: Number,
default: null default: null
},
autoSelectFirstOption: {
type: Boolean,
default: false
} }
}, },
data () { data () {
@ -147,11 +156,12 @@ export default {
searchTimer: null, searchTimer: null,
scrollHandlerAttached: false, scrollHandlerAttached: false,
preselectedOptionValue: null, preselectedOptionValue: null,
successiveFetches: 0 successiveFetches: 0,
canSelectFirstOption: false
} }
}, },
created () { created () {
this.addDefaultOptionIfNeeded(true) this.addDefaultOptionIfNeeded()
}, },
mounted () { mounted () {
this.preselectedOptionValue = this.$attrs.value this.preselectedOptionValue = this.$attrs.value
@ -208,6 +218,7 @@ export default {
}).catch(error => { }).catch(error => {
this.$notifyError(error) this.$notifyError(error)
}).finally(() => { }).finally(() => {
this.canSelectFirstOption = true
if (this.successiveFetches === 0) { if (this.successiveFetches === 0) {
this.loading = false this.loading = false
} }
@ -218,6 +229,12 @@ export default {
(Array.isArray(this.preselectedOptionValue) && this.preselectedOptionValue.length === 0) || (Array.isArray(this.preselectedOptionValue) && this.preselectedOptionValue.length === 0) ||
this.successiveFetches >= this.maxSuccessiveFetches) { this.successiveFetches >= this.maxSuccessiveFetches) {
this.resetPreselectedOptionValue() this.resetPreselectedOptionValue()
if (!this.canSelectFirstOption && this.autoSelectFirstOption && this.options.length > 0) {
this.$nextTick(() => {
this.preselectedOptionValue = this.options[0][this.optionValueKey]
this.onChange(this.preselectedOptionValue)
})
}
return return
} }
const matchValue = Array.isArray(this.preselectedOptionValue) ? this.preselectedOptionValue[0] : this.preselectedOptionValue const matchValue = Array.isArray(this.preselectedOptionValue) ? this.preselectedOptionValue[0] : this.preselectedOptionValue
@ -239,6 +256,7 @@ export default {
}, },
addDefaultOptionIfNeeded () { addDefaultOptionIfNeeded () {
if (this.defaultOption) { if (this.defaultOption) {
this.canSelectFirstOption = true
this.options.push(this.defaultOption) this.options.push(this.defaultOption)
} }
}, },

View File

@ -29,7 +29,7 @@
</template> </template>
<template #select-option="{ item }"> <template #select-option="{ item }">
<span> <span>
<resource-icon v-if="item.icon && zone1.icon.base64image" :image="item.icon.base64image" size="2x" style="margin-right: 5px"/> <resource-icon v-if="item.icon && item.icon.base64image" :image="item.icon.base64image" size="2x" style="margin-right: 5px"/>
<global-outlined v-else style="margin-right: 5px" /> <global-outlined v-else style="margin-right: 5px" />
{{ item.name }} {{ item.name }}
</span> </span>

View File

@ -148,20 +148,17 @@
<a-alert :message="$t('message.action.acquire.ip')" type="warning" /> <a-alert :message="$t('message.action.acquire.ip')" type="warning" />
<a-form layout="vertical" style="margin-top: 10px"> <a-form layout="vertical" style="margin-top: 10px">
<a-form-item :label="$t('label.ipaddress')"> <a-form-item :label="$t('label.ipaddress')">
<a-select <infinite-scroll-select
v-focus="true" v-focus="true"
style="width: 100%;"
v-model:value="acquireIp" v-model:value="acquireIp"
showSearch api="listPublicIpAddresses"
optionFilterProp="label" :apiParams="listApiParamsForAssociate"
:filterOption="(input, option) => { resourceType="publicipaddress"
return option.label.toLowerCase().indexOf(input.toLowerCase()) >= 0 optionValueKey="ipaddress"
}" > :optionLabelFn="ip => ip.ipaddress + ' (' + ip.state + ')'"
<a-select-option defaultIcon="environment-outlined"
v-for="ip in listPublicIpAddress" :autoSelectFirstOption="true"
:key="ip.ipaddress" @change-option-value="(ip) => acquireIp = ip" />
:label="ip.ipaddress + '(' + ip.state + ')'">{{ ip.ipaddress }} ({{ ip.state }})</a-select-option>
</a-select>
</a-form-item> </a-form-item>
<div :span="24" class="action-button"> <div :span="24" class="action-button">
<a-button @click="onCloseModal">{{ $t('label.cancel') }}</a-button> <a-button @click="onCloseModal">{{ $t('label.cancel') }}</a-button>
@ -212,13 +209,15 @@ import Status from '@/components/widgets/Status'
import TooltipButton from '@/components/widgets/TooltipButton' import TooltipButton from '@/components/widgets/TooltipButton'
import BulkActionView from '@/components/view/BulkActionView' import BulkActionView from '@/components/view/BulkActionView'
import eventBus from '@/config/eventBus' import eventBus from '@/config/eventBus'
import InfiniteScrollSelect from '@/components/widgets/InfiniteScrollSelect'
export default { export default {
name: 'IpAddressesTab', name: 'IpAddressesTab',
components: { components: {
Status, Status,
TooltipButton, TooltipButton,
BulkActionView BulkActionView,
InfiniteScrollSelect
}, },
props: { props: {
resource: { resource: {
@ -281,7 +280,6 @@ export default {
showAcquireIp: false, showAcquireIp: false,
acquireLoading: false, acquireLoading: false,
acquireIp: null, acquireIp: null,
listPublicIpAddress: [],
changeSourceNat: false, changeSourceNat: false,
zoneExtNetProvider: '' zoneExtNetProvider: ''
} }
@ -302,6 +300,26 @@ export default {
} }
}, },
inject: ['parentFetchData'], inject: ['parentFetchData'],
computed: {
listApiParams () {
const params = {
zoneid: this.resource.zoneid,
domainid: this.resource.domainid,
account: this.resource.account,
forvirtualnetwork: true,
allocatedonly: false
}
if (['nsx', 'netris'].includes(this.zoneExtNetProvider?.toLowerCase())) {
params.forprovider = true
}
return params
},
listApiParamsForAssociate () {
const params = this.listApiParams
params.state = 'Free,Reserved'
return params
}
},
methods: { methods: {
fetchData () { fetchData () {
const params = { const params = {
@ -344,19 +362,9 @@ export default {
}).catch(reject) }).catch(reject)
}) })
}, },
fetchListPublicIpAddress () { fetchListPublicIpAddress (state) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const params = { getAPI('listPublicIpAddresses', this.listApiParams).then(json => {
zoneid: this.resource.zoneid,
domainid: this.resource.domainid,
account: this.resource.account,
forvirtualnetwork: true,
allocatedonly: false
}
if (['nsx', 'netris'].includes(this.zoneExtNetProvider?.toLowerCase())) {
params.forprovider = true
}
getAPI('listPublicIpAddresses', params).then(json => {
const listPublicIps = json.listpublicipaddressesresponse.publicipaddress || [] const listPublicIps = json.listpublicipaddressesresponse.publicipaddress || []
resolve(listPublicIps) resolve(listPublicIps)
}).catch(reject) }).catch(reject)
@ -554,30 +562,6 @@ export default {
}, },
async onShowAcquireIp () { async onShowAcquireIp () {
this.showAcquireIp = true this.showAcquireIp = true
this.acquireLoading = true
this.listPublicIpAddress = []
try {
const listPublicIpAddress = await this.fetchListPublicIpAddress()
listPublicIpAddress.forEach(item => {
if (item.state === 'Free' || item.state === 'Reserved') {
this.listPublicIpAddress.push({
ipaddress: item.ipaddress,
state: item.state
})
}
})
this.listPublicIpAddress.sort(function (a, b) {
if (a.ipaddress < b.ipaddress) { return -1 }
if (a.ipaddress > b.ipaddress) { return 1 }
return 0
})
this.acquireIp = this.listPublicIpAddress && this.listPublicIpAddress.length > 0 ? this.listPublicIpAddress[0].ipaddress : null
this.acquireLoading = false
} catch (e) {
this.acquireLoading = false
this.$notifyError(e)
}
}, },
onCloseModal () { onCloseModal () {
this.showAcquireIp = false this.showAcquireIp = false

View File

@ -32,7 +32,7 @@
<b> {{record.displaytext }} </b> {{ ' (' + record.name + ')' }} <br/> {{ record.description }} <b> {{record.displaytext }} </b> {{ ' (' + record.name + ')' }} <br/> {{ record.description }}
</template> </template>
<template v-if="column.key === 'value'"> <template v-if="column.key === 'value'">
<ConfigurationValue :configrecord="record" /> <ConfigurationValue :configrecord="record" :resource="resource" />
</template> </template>
</template> </template>
</a-table> </a-table>
@ -85,6 +85,10 @@ export default {
pagesize: { pagesize: {
type: Number, type: Number,
default: 20 default: 20
},
resource: {
type: Object,
required: false
} }
}, },
data () { data () {

View File

@ -217,6 +217,10 @@ export default {
actions: { actions: {
type: Array, type: Array,
default: () => [] default: () => []
},
resource: {
type: Object,
required: false
} }
}, },
data () { data () {
@ -254,6 +258,12 @@ export default {
this.setConfigData() this.setConfigData()
}, },
watch: { watch: {
configrecord: {
handler () {
this.setConfigData()
},
deep: true
}
}, },
methods: { methods: {
setConfigData () { setConfigData () {
@ -280,6 +290,9 @@ export default {
name: configrecord.name, name: configrecord.name,
value: newValue value: newValue
} }
if (this.scopeKey === 'domainid' && !params[this.scopeKey]) {
params[this.scopeKey] = this.resource?.id
}
postAPI('updateConfiguration', params).then(json => { postAPI('updateConfiguration', params).then(json => {
this.editableValue = this.getEditableValue(json.updateconfigurationresponse.configuration) this.editableValue = this.getEditableValue(json.updateconfigurationresponse.configuration)
this.actualValue = this.editableValue this.actualValue = this.editableValue
@ -315,6 +328,9 @@ export default {
[this.scopeKey]: this.$route.params?.id, [this.scopeKey]: this.$route.params?.id,
name: configrecord.name name: configrecord.name
} }
if (this.scopeKey === 'domainid' && !params[this.scopeKey]) {
params[this.scopeKey] = this.resource?.id
}
postAPI('resetConfiguration', params).then(json => { postAPI('resetConfiguration', params).then(json => {
this.editableValue = this.getEditableValue(json.resetconfigurationresponse.configuration) this.editableValue = this.getEditableValue(json.resetconfigurationresponse.configuration)
this.actualValue = this.editableValue this.actualValue = this.editableValue

View File

@ -1008,7 +1008,12 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
private boolean isVolumeEvent(String eventType) { private boolean isVolumeEvent(String eventType) {
return eventType != null && return eventType != null &&
(eventType.equals(EventTypes.EVENT_VOLUME_CREATE) || eventType.equals(EventTypes.EVENT_VOLUME_DELETE) || eventType.equals(EventTypes.EVENT_VOLUME_RESIZE) || eventType.equals(EventTypes.EVENT_VOLUME_UPLOAD)); (eventType.equals(EventTypes.EVENT_VOLUME_CREATE) ||
eventType.equals(EventTypes.EVENT_VOLUME_DELETE) ||
eventType.equals(EventTypes.EVENT_VOLUME_RESIZE) ||
eventType.equals(EventTypes.EVENT_VOLUME_UPLOAD) ||
eventType.equals(EventTypes.EVENT_VOLUME_ATTACH) ||
eventType.equals(EventTypes.EVENT_VOLUME_DETACH));
} }
private boolean isTemplateEvent(String eventType) { private boolean isTemplateEvent(String eventType) {
@ -1424,92 +1429,112 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
} }
} }
private void deleteExistingSecondaryStorageUsageForVolume(long volId, long accountId, Date deletedDate) {
List<UsageStorageVO> storageVOs = _usageStorageDao.listById(accountId, volId, StorageTypes.VOLUME);
for (UsageStorageVO storageVO : storageVOs) {
logger.debug("Setting the volume with id: {} to 'deleted' in the usage_storage table for account: {}.", volId, accountId);
storageVO.setDeleted(deletedDate);
_usageStorageDao.update(storageVO);
}
}
private void deleteExistingInstanceVolumeUsage(long volId, long accountId, Date deletedDate) {
List<UsageVolumeVO> volumesVOs = _usageVolumeDao.listByVolumeId(volId, accountId);
for (UsageVolumeVO volumesVO : volumesVOs) {
if (volumesVO.getVmId() != null) {
logger.debug("Setting the volume with id: {} for instance id: {} to 'deleted' in the usage_volume table for account {}.",
volumesVO.getVolumeId(), volumesVO.getVmId(), accountId);
volumesVO.setDeleted(deletedDate);
_usageVolumeDao.update(volumesVO.getId(), volumesVO);
}
}
}
private void deleteExistingVolumeUsage(long volId, long accountId, Date deletedDate) {
List<UsageVolumeVO> volumesVOs = _usageVolumeDao.listByVolumeId(volId, accountId);
for (UsageVolumeVO volumesVO : volumesVOs) {
logger.debug("Setting the volume with id: {} to 'deleted' in the usage_volume table for account: {}.", volId, accountId);
volumesVO.setDeleted(deletedDate);
_usageVolumeDao.update(volumesVO.getId(), volumesVO);
}
}
private void createVolumeHelperEvent(UsageEventVO event) { private void createVolumeHelperEvent(UsageEventVO event) {
long volId = event.getResourceId(); long volId = event.getResourceId();
Account acct = _accountDao.findByIdIncludingRemoved(event.getAccountId());
List<UsageVolumeVO> volumesVOs;
UsageVolumeVO volumeVO;
if (EventTypes.EVENT_VOLUME_CREATE.equals(event.getType())) { switch (event.getType()) {
//For volumes which are 'attached' successfully, set the 'deleted' column in the usage_storage table, case EventTypes.EVENT_VOLUME_CREATE:
//For volumes which are 'attached' successfully from uploaded state, set the 'deleted' column in the usage_storage table,
//so that the secondary storage should stop accounting and only primary will be accounted. //so that the secondary storage should stop accounting and only primary will be accounted.
SearchCriteria<UsageStorageVO> sc = _usageStorageDao.createSearchCriteria(); deleteExistingSecondaryStorageUsageForVolume(volId, event.getAccountId(), event.getCreateDate());
sc.addAnd("entityId", SearchCriteria.Op.EQ, volId);
sc.addAnd("storageType", SearchCriteria.Op.EQ, StorageTypes.VOLUME); volumesVOs = _usageVolumeDao.listByVolumeId(volId, event.getAccountId());
List<UsageStorageVO> volumesVOs = _usageStorageDao.search(sc, null);
if (volumesVOs != null) {
if (volumesVOs.size() == 1) {
logger.debug("Setting the volume with id: " + volId + " to 'deleted' in the usage_storage table.");
volumesVOs.get(0).setDeleted(event.getCreateDate());
_usageStorageDao.update(volumesVOs.get(0));
}
}
}
if (EventTypes.EVENT_VOLUME_CREATE.equals(event.getType()) || EventTypes.EVENT_VOLUME_RESIZE.equals(event.getType())) {
SearchCriteria<UsageVolumeVO> sc = _usageVolumeDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("volumeId", SearchCriteria.Op.EQ, volId);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageVolumeVO> volumesVOs = _usageVolumeDao.search(sc, null);
if (volumesVOs.size() > 0) { if (volumesVOs.size() > 0) {
//This is a safeguard to avoid double counting of volumes. //This is a safeguard to avoid double counting of volumes.
logger.error("Found duplicate usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking as deleted..."); logger.error("Found duplicate usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking as deleted...");
deleteExistingVolumeUsage(volId, event.getAccountId(), event.getCreateDate());
} }
//an entry exists if it is a resize volume event. marking the existing deleted and creating a new one in the case of resize.
for (UsageVolumeVO volumesVO : volumesVOs) { logger.debug("Creating a new entry in usage_volume for volume with id: {} for account: {}", volId, event.getAccountId());
if (logger.isDebugEnabled()) { volumeVO = new UsageVolumeVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), event.getOfferingId(), event.getTemplateId(), null, event.getSize(), event.getCreateDate(), null);
logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId());
}
volumesVO.setDeleted(event.getCreateDate());
_usageVolumeDao.update(volumesVO);
}
if (logger.isDebugEnabled()) {
logger.debug("create volume with id : " + volId + " for account: " + event.getAccountId());
}
Account acct = _accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageVolumeVO volumeVO = new UsageVolumeVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), event.getOfferingId(), event.getTemplateId(), event.getSize(), event.getCreateDate(), null);
_usageVolumeDao.persist(volumeVO); _usageVolumeDao.persist(volumeVO);
} else if (EventTypes.EVENT_VOLUME_DELETE.equals(event.getType())) {
SearchCriteria<UsageVolumeVO> sc = _usageVolumeDao.createSearchCriteria(); if (event.getVmId() != null) {
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId()); volumeVO = new UsageVolumeVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), event.getOfferingId(), event.getTemplateId(), event.getVmId(), event.getSize(), event.getCreateDate(), null);
sc.addAnd("volumeId", SearchCriteria.Op.EQ, volId); _usageVolumeDao.persist(volumeVO);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageVolumeVO> volumesVOs = _usageVolumeDao.search(sc, null);
if (volumesVOs.size() > 1) {
logger.warn("More that one usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
} }
break;
case EventTypes.EVENT_VOLUME_RESIZE:
volumesVOs = _usageVolumeDao.listByVolumeId(volId, event.getAccountId());
for (UsageVolumeVO volumesVO : volumesVOs) { for (UsageVolumeVO volumesVO : volumesVOs) {
if (logger.isDebugEnabled()) { String delete_msg = String.format("Setting the volume with id: %s to 'deleted' in the usage_volume table for account: %s.", volId, event.getAccountId());
logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId()); String create_msg = String.format("Creating a new entry in usage_volume for volume with id: %s after resize for account: %s", volId, event.getAccountId());
} Long vmId = volumesVO.getVmId();
volumesVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one if (vmId != null) {
_usageVolumeDao.update(volumesVO); delete_msg = String.format("Setting the volume with id: %s for instance id: %s to 'deleted' in the usage_volume table for account: %s.",
} volId, vmId, event.getAccountId());
} else if (EventTypes.EVENT_VOLUME_UPLOAD.equals(event.getType())) { create_msg = String.format("Creating a new entry in usage_volume for volume with id: %s and instance id: %s after resize for account: %s",
//For Upload event add an entry to the usage_storage table. volId, vmId, event.getAccountId());
SearchCriteria<UsageStorageVO> sc = _usageStorageDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("entityId", SearchCriteria.Op.EQ, volId);
sc.addAnd("storageType", SearchCriteria.Op.EQ, StorageTypes.VOLUME);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageStorageVO> volumesVOs = _usageStorageDao.search(sc, null);
if (volumesVOs.size() > 0) {
//This is a safeguard to avoid double counting of volumes.
logger.error("Found duplicate usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking as deleted...");
}
for (UsageStorageVO volumesVO : volumesVOs) {
if (logger.isDebugEnabled()) {
logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId());
} }
logger.debug(delete_msg);
volumesVO.setDeleted(event.getCreateDate()); volumesVO.setDeleted(event.getCreateDate());
_usageStorageDao.update(volumesVO); _usageVolumeDao.update(volumesVO.getId(), volumesVO);
}
if (logger.isDebugEnabled()) { logger.debug(create_msg);
logger.debug("create volume with id : " + volId + " for account: " + event.getAccountId()); volumeVO = new UsageVolumeVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), event.getOfferingId(), event.getTemplateId(), vmId, event.getSize(), event.getCreateDate(), null);
_usageVolumeDao.persist(volumeVO);
} }
Account acct = _accountDao.findByIdIncludingRemoved(event.getAccountId()); break;
UsageStorageVO volumeVO = new UsageStorageVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), StorageTypes.VOLUME, event.getTemplateId(), event.getSize(), event.getCreateDate(), null);
_usageStorageDao.persist(volumeVO); case EventTypes.EVENT_VOLUME_DELETE:
deleteExistingVolumeUsage(volId, event.getAccountId(), event.getCreateDate());
break;
case EventTypes.EVENT_VOLUME_ATTACH:
deleteExistingInstanceVolumeUsage(event.getResourceId(), event.getAccountId(), event.getCreateDate());
logger.debug("Creating a new entry in usage_volume for volume with id: {}, and instance id: {} for account: {}",
volId, event.getVmId(), event.getAccountId());
volumeVO = new UsageVolumeVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), event.getOfferingId(), event.getTemplateId(), event.getVmId(), event.getSize(), event.getCreateDate(), null);
_usageVolumeDao.persist(volumeVO);
break;
case EventTypes.EVENT_VOLUME_DETACH:
deleteExistingInstanceVolumeUsage(event.getResourceId(), event.getAccountId(), event.getCreateDate());
break;
case EventTypes.EVENT_VOLUME_UPLOAD:
deleteExistingSecondaryStorageUsageForVolume(volId, event.getAccountId(), event.getCreateDate());
logger.debug("Creating a new entry in usage_storage for volume with id : {} for account: {}", volId, event.getAccountId());
UsageStorageVO storageVO = new UsageStorageVO(volId, event.getZoneId(), event.getAccountId(), acct.getDomainId(), StorageTypes.VOLUME, event.getTemplateId(), event.getSize(), event.getCreateDate(), null);
_usageStorageDao.persist(storageVO);
break;
} }
} }
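
The rewritten createVolumeHelperEvent switch records VM association per accounting period by closing the open usage_volume row and opening a fresh one whenever the attachment state changes: CREATE closes any secondary-storage row, opens a row without a VM id and, when the event carries one, a second VM-bound row; RESIZE closes each open row and reopens it with the new size while keeping its VM id; ATTACH closes the previous VM-bound row and opens a new one; DETACH only closes the VM-bound row; DELETE closes everything. The sketch below is a simplified in-memory model of that close-and-reopen bookkeeping, using an assumed UsageRow type and a plain list in place of the usage DAOs.

import java.time.Instant;
import java.util.ArrayList;
import java.util.List;

// Simplified in-memory model of the usage_volume bookkeeping; not CloudStack code.
public class VolumeUsageLedger {

    static final class UsageRow {
        final long volumeId;
        final Long vmId;      // null while the volume is not attached to an instance
        final long size;
        final Instant created;
        Instant deleted;      // null while the row is still accruing usage

        UsageRow(long volumeId, Long vmId, long size, Instant created) {
            this.volumeId = volumeId;
            this.vmId = vmId;
            this.size = size;
            this.created = created;
        }
    }

    private final List<UsageRow> rows = new ArrayList<>();

    // Close open rows for the volume; when vmBoundOnly is true, only rows that
    // carry a VM id are closed (mirrors deleteExistingInstanceVolumeUsage).
    private void close(long volumeId, boolean vmBoundOnly, Instant when) {
        for (UsageRow r : rows) {
            if (r.volumeId == volumeId && r.deleted == null && (!vmBoundOnly || r.vmId != null)) {
                r.deleted = when;
            }
        }
    }

    public void attach(long volumeId, long vmId, long size, Instant when) {
        close(volumeId, true, when);                        // end any previous attachment period
        rows.add(new UsageRow(volumeId, vmId, size, when)); // start a VM-bound period
    }

    public void detach(long volumeId, Instant when) {
        close(volumeId, true, when);                        // only the VM-bound period ends
    }

    public void resize(long volumeId, long newSize, Instant when) {
        List<UsageRow> open = new ArrayList<>();
        for (UsageRow r : rows) {
            if (r.volumeId == volumeId && r.deleted == null) {
                open.add(r);
            }
        }
        for (UsageRow r : open) {
            r.deleted = when;                                         // close the old-size period
            rows.add(new UsageRow(volumeId, r.vmId, newSize, when));  // reopen with the new size
        }
    }

    public void delete(long volumeId, Instant when) {
        close(volumeId, false, when);                       // everything stops accruing
    }
}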

View File

@ -73,12 +73,13 @@ public class VolumeUsageParser extends UsageParser {
for (UsageVolumeVO usageVol : usageUsageVols) { for (UsageVolumeVO usageVol : usageUsageVols) {
long volId = usageVol.getVolumeId(); long volId = usageVol.getVolumeId();
Long doId = usageVol.getDiskOfferingId(); Long doId = usageVol.getDiskOfferingId();
Long vmId = usageVol.getVmId();
long zoneId = usageVol.getZoneId(); long zoneId = usageVol.getZoneId();
Long templateId = usageVol.getTemplateId(); Long templateId = usageVol.getTemplateId();
long size = usageVol.getSize(); long size = usageVol.getSize();
String key = volId + "-" + doId + "-" + size; String key = volId + "-" + doId + "-" + vmId + "-" + size;
diskOfferingMap.put(key, new VolInfo(volId, zoneId, doId, templateId, size)); diskOfferingMap.put(key, new VolInfo(volId, zoneId, doId, templateId, size, vmId));
Date volCreateDate = usageVol.getCreated(); Date volCreateDate = usageVol.getCreated();
Date volDeleteDate = usageVol.getDeleted(); Date volDeleteDate = usageVol.getDeleted();
@ -110,7 +111,7 @@ public class VolumeUsageParser extends UsageParser {
if (useTime > 0L) { if (useTime > 0L) {
VolInfo info = diskOfferingMap.get(volIdKey); VolInfo info = diskOfferingMap.get(volIdKey);
createUsageRecord(UsageTypes.VOLUME, useTime, startDate, endDate, account, info.getVolumeId(), info.getZoneId(), info.getDiskOfferingId(), createUsageRecord(UsageTypes.VOLUME, useTime, startDate, endDate, account, info.getVolumeId(), info.getZoneId(), info.getDiskOfferingId(),
info.getTemplateId(), info.getSize()); info.getTemplateId(), info.getVmId(), info.getSize());
} }
} }
@ -130,7 +131,7 @@ public class VolumeUsageParser extends UsageParser {
} }
private void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long volId, long zoneId, Long doId, private void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long volId, long zoneId, Long doId,
Long templateId, long size) { Long templateId, Long vmId, long size) {
// Our smallest increment is hourly for now // Our smallest increment is hourly for now
logger.debug("Total running time {} ms", runningTime); logger.debug("Total running time {} ms", runningTime);
@ -152,7 +153,11 @@ public class VolumeUsageParser extends UsageParser {
usageDesc += " (DiskOffering: " + doId + ")"; usageDesc += " (DiskOffering: " + doId + ")";
} }
UsageVO usageRecord = new UsageVO(zoneId, account.getId(), account.getDomainId(), usageDesc, usageDisplay + " Hrs", type, new Double(usage), null, null, doId, templateId, volId, if (vmId != null) {
usageDesc += " (VM: " + vmId + ")";
}
UsageVO usageRecord = new UsageVO(zoneId, account.getId(), account.getDomainId(), usageDesc, usageDisplay + " Hrs", type, new Double(usage), vmId, null, doId, templateId, volId,
size, startDate, endDate); size, startDate, endDate);
usageDao.persist(usageRecord); usageDao.persist(usageRecord);
} }
@ -163,13 +168,15 @@ public class VolumeUsageParser extends UsageParser {
private Long diskOfferingId; private Long diskOfferingId;
private Long templateId; private Long templateId;
private long size; private long size;
private Long vmId;
public VolInfo(long volId, long zoneId, Long diskOfferingId, Long templateId, long size) { public VolInfo(long volId, long zoneId, Long diskOfferingId, Long templateId, long size, Long vmId) {
this.volId = volId; this.volId = volId;
this.zoneId = zoneId; this.zoneId = zoneId;
this.diskOfferingId = diskOfferingId; this.diskOfferingId = diskOfferingId;
this.templateId = templateId; this.templateId = templateId;
this.size = size; this.size = size;
this.vmId = vmId;
} }
public long getZoneId() { public long getZoneId() {
@ -191,5 +198,9 @@ public class VolumeUsageParser extends UsageParser {
public long getSize() { public long getSize() {
return size; return size;
} }
public Long getVmId() {
return vmId;
}
} }
} }
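
Because usage_volume can now hold several rows for the same volume within one billing period (one per attachment state), the parser's aggregation key grows from volId-doId-size to volId-doId-vmId-size, so hours are summed per volume and VM combination and the resulting usage record carries the VM id. A small hedged sketch of that grouping, using a plain map in place of the parser's VolInfo and UsageVO types:

import java.util.HashMap;
import java.util.Map;

// Illustrative only: shows why the VM id has to be part of the aggregation key.
public class VolumeUsageKeying {

    public static void main(String[] args) {
        Map<String, Long> usageMillis = new HashMap<>();

        // Same volume and offering, different attachment periods.
        add(usageMillis, 42L, 7L, null, 10737418240L, 3_600_000L);  // detached for 1h
        add(usageMillis, 42L, 7L, 101L, 10737418240L, 7_200_000L);  // attached to VM 101 for 2h
        add(usageMillis, 42L, 7L, 101L, 10737418240L, 1_800_000L);  // attached again for 30m

        // Prints one aggregated entry per volId-doId-vmId-size key.
        usageMillis.forEach((key, millis) ->
                System.out.printf("%s -> %.2f Hrs%n", key, millis / 3_600_000.0));
    }

    static void add(Map<String, Long> acc, long volId, Long doId, Long vmId, long size, long millis) {
        String key = volId + "-" + doId + "-" + vmId + "-" + size;  // same shape as the parser's key
        acc.merge(key, millis, Long::sum);
    }
}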

View File

@ -19,7 +19,7 @@
package com.cloud.utils; package com.cloud.utils;
public class EnumUtils { public class EnumUtils extends org.apache.commons.lang3.EnumUtils {
public static String listValues(Enum<?>[] enums) { public static String listValues(Enum<?>[] enums) {
StringBuilder b = new StringBuilder("["); StringBuilder b = new StringBuilder("[");
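
With com.cloud.utils.EnumUtils now extending org.apache.commons.lang3.EnumUtils, callers that already import the CloudStack class also get the commons-lang3 helpers (such as getEnumIgnoreCase and isValidEnum) alongside listValues, without a second import. A short usage sketch, assuming the cloud-utils module is on the classpath; the PowerState enum is only a placeholder:

// Placeholder enum for the example; any enum works the same way.
enum PowerState { On, Off, Unknown }

public class EnumUtilsUsage {
    public static void main(String[] args) {
        // Inherited from org.apache.commons.lang3.EnumUtils:
        PowerState s = com.cloud.utils.EnumUtils.getEnumIgnoreCase(PowerState.class, "off");
        boolean valid = com.cloud.utils.EnumUtils.isValidEnum(PowerState.class, "Sleeping");

        System.out.println(s);      // Off
        System.out.println(valid);  // false

        // Still provided by the CloudStack class itself:
        System.out.println(com.cloud.utils.EnumUtils.listValues(PowerState.values()));
    }
}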

View File

@ -67,11 +67,13 @@ public final class ProcessRunner {
public ProcessRunner(ExecutorService executor) { public ProcessRunner(ExecutorService executor) {
this.executor = executor; this.executor = executor;
commandLogReplacements.add(new Ternary<>("ipmitool", "-P\\s+\\S+", "-P *****")); commandLogReplacements.add(new Ternary<>("ipmitool", "-P\\s+\\S+", "-P *****"));
commandLogReplacements.add(new Ternary<>("ipmitool", "(?i)password\\s+\\S+\\s+\\S+", "password **** ****"));
} }
/** /**
* Executes a process with provided list of commands with a max default timeout * Executes a process with provided list of commands with a max default timeout
* of 5 minutes * of 5 minutes
*
* @param commands list of string commands * @param commands list of string commands
* @return returns process result * @return returns process result
*/ */
@ -82,6 +84,7 @@ public final class ProcessRunner {
/** /**
* Executes a process with provided list of commands with a given timeout that is less * Executes a process with provided list of commands with a given timeout that is less
* than or equal to DEFAULT_MAX_TIMEOUT * than or equal to DEFAULT_MAX_TIMEOUT
*
* @param commands list of string commands * @param commands list of string commands
* @param timeOut timeout duration * @param timeOut timeout duration
* @return returns process result * @return returns process result
@ -109,14 +112,16 @@ public final class ProcessRunner {
} }
}); });
try { try {
logger.debug("Waiting for a response from command [{}]. Defined timeout: [{}].", commandLog, timeOut.getStandardSeconds()); logger.debug("Waiting for a response from command [{}]. Defined timeout: [{}].", commandLog,
timeOut.getStandardSeconds());
retVal = processFuture.get(timeOut.getStandardSeconds(), TimeUnit.SECONDS); retVal = processFuture.get(timeOut.getStandardSeconds(), TimeUnit.SECONDS);
} catch (ExecutionException e) { } catch (ExecutionException e) {
logger.warn("Failed to complete the requested command [{}] due to execution error.", commands, e); logger.warn("Failed to complete the requested command [{}] due to execution error.", commandLog, e);
retVal = -2; retVal = -2;
stdError = e.getMessage(); stdError = e.getMessage();
} catch (TimeoutException e) { } catch (TimeoutException e) {
logger.warn("Failed to complete the requested command [{}] within timeout. Defined timeout: [{}].", commandLog, timeOut.getStandardSeconds(), e); logger.warn("Failed to complete the requested command [{}] within timeout. Defined timeout: [{}].",
commandLog, timeOut.getStandardSeconds(), e);
retVal = -1; retVal = -1;
stdError = "Operation timed out, aborted."; stdError = "Operation timed out, aborted.";
} finally { } finally {
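
The new replacement entry masks the two tokens that follow the word "password" in an ipmitool command line (case-insensitively), so "ipmitool user set password <uid> <secret>" is logged as "... password **** ****", complementing the existing -P masking. Below is a standalone sketch of the same substitution with java.util.regex, assuming the rules are only applied when the command line contains "ipmitool", as the Ternary entries suggest; it is an illustration, not the ProcessRunner implementation.

import java.util.regex.Pattern;

// Standalone illustration of the log-scrubbing rules added to ProcessRunner.
public class IpmiLogScrubber {

    private static final Pattern PASSWORD_FLAG = Pattern.compile("-P\\s+\\S+");
    private static final Pattern PASSWORD_SET  = Pattern.compile("(?i)password\\s+\\S+\\s+\\S+");

    static String scrub(String commandLine) {
        if (!commandLine.contains("ipmitool")) {
            return commandLine;                 // both rules are keyed on "ipmitool"
        }
        String masked = PASSWORD_FLAG.matcher(commandLine).replaceAll("-P *****");
        return PASSWORD_SET.matcher(masked).replaceAll("password **** ****");
    }

    public static void main(String[] args) {
        System.out.println(scrub("/usr/bin/ipmitool user set password 3 Sup3rSecr3t!"));
        // -> /usr/bin/ipmitool user set password **** ****
        System.out.println(scrub("ipmitool -I lanplus -U admin -P s3cret chassis power status"));
        // -> ipmitool -I lanplus -U admin -P ***** chassis power status
    }
}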

View File

@ -60,4 +60,16 @@ public class ProcessRunnerTest {
Assert.assertTrue(log.contains(password)); Assert.assertTrue(log.contains(password));
Assert.assertEquals(1, countSubstringOccurrences(log, password)); Assert.assertEquals(1, countSubstringOccurrences(log, password));
} }
@Test
public void testRemoveCommandSensitiveInfoForLoggingIpmiPasswordCommand() {
String userId = "3";
String newPassword = "Sup3rSecr3t!";
String command = String.format("/usr/bin/ipmitool user set password %s %s", userId, newPassword);
String log = processRunner.removeCommandSensitiveInfoForLogging(command);
Assert.assertFalse(log.contains(userId));
Assert.assertFalse(log.contains(newPassword));
Assert.assertTrue(log.contains("password **** ****"));
}
} }