Merge pull request #654 from DaanHoogland/CLOUDSTACK-8656

Cloudstack 8656: do away with more silently ignoring exceptions. A lot of messages added.
Some restructuring for test exception assertions and try-with-resources blocks.

* pr/654: (29 commits)
  CLOUDSTACK-8656: more logging instead of sysout
  CLOUDSTACK-8656: use catch block for validation
  CLOUDSTACK-8656: class in json specified not found
  CLOUDSTACK-8656: removed unused classes
  CLOUDSTACK-8656: restructure of tests
  CLOUDSTACK-8656: reorganise sychronized block
  CLOUDSTACK-8656: restructure tests to ensure exception throwing
  CLOUDSTACK-8656: validate the throwing of ServerApiException
  CLOUDSTACK-8656: logging ignored exceptions
  CLOUDSTACK-8656: try-w-r removes need for empty catch block
  CLOUDSTACK-8656: try-w-r instead of clunckey close-except
  CLOUDSTACK-8656: deal with empty SQLException catch block by try-w-r
  CLOUDSTACK-8656: unnecessary close construct removed
  CLOUDSTACK-8656: message about timed buffer logging
  CLOUDSTACK-8656: message about invalid number from store
  CLOUDSTACK-8656: move cli test tool to separate file
  CLOUDSTACK-8656: exception is the rule for some tests
  CLOUDSTACK-8656: network related exception logging
  CLOUDSTACK-8656: reporting ignored exceptions in server
  CLOUDSTACK-8656: log in case we are on a platform not supporting UTF8
  ...

Signed-off-by: Remi Bergsma <github@remi.nl>
This commit is contained in:
Remi Bergsma 2015-08-14 21:38:48 +02:00
commit 64ff67da55
44 changed files with 472 additions and 801 deletions

View File

@ -82,23 +82,14 @@ public class Upgrade40to41 implements DbUpgrade {
if (regionId != null) { if (regionId != null) {
region_id = Integer.parseInt(regionId); region_id = Integer.parseInt(regionId);
} }
PreparedStatement pstmt = null; try (PreparedStatement pstmt = conn.prepareStatement("update `cloud`.`region` set id = ?");) {
try {
//Update regionId in region table //Update regionId in region table
s_logger.debug("Updating region table with Id: " + region_id); s_logger.debug("Updating region table with Id: " + region_id);
pstmt = conn.prepareStatement("update `cloud`.`region` set id = ?");
pstmt.setInt(1, region_id); pstmt.setInt(1, region_id);
pstmt.executeUpdate(); pstmt.executeUpdate();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Error while updating region entries", e); throw new CloudRuntimeException("Error while updating region entries", e);
} finally {
try {
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
} }
} }

View File

@ -33,7 +33,6 @@ import java.util.Map;
import java.util.UUID; import java.util.UUID;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.cloudstack.api.ApiConstants; import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreProvider;
@ -1396,12 +1395,13 @@ public class Upgrade410to420 implements DbUpgrade {
// Corrects upgrade for deployment with F5 and SRX devices (pre 3.0) to network offering & // Corrects upgrade for deployment with F5 and SRX devices (pre 3.0) to network offering &
// network service provider paradigm // network service provider paradigm
private void correctExternalNetworkDevicesSetup(Connection conn) { private void correctExternalNetworkDevicesSetup(Connection conn) {
PreparedStatement zoneSearchStmt = null, pNetworkStmt = null, f5DevicesStmt = null, srxDevicesStmt = null; PreparedStatement pNetworkStmt = null, f5DevicesStmt = null, srxDevicesStmt = null;
ResultSet zoneResults = null, pNetworksResults = null, f5DevicesResult = null, srxDevicesResult = null; ResultSet pNetworksResults = null, f5DevicesResult = null, srxDevicesResult = null;
try { try (
zoneSearchStmt = conn.prepareStatement("SELECT id, networktype FROM `cloud`.`data_center`"); PreparedStatement zoneSearchStmt = conn.prepareStatement("SELECT id, networktype FROM `cloud`.`data_center`");
zoneResults = zoneSearchStmt.executeQuery(); ResultSet zoneResults = zoneSearchStmt.executeQuery();
){
while (zoneResults.next()) { while (zoneResults.next()) {
long zoneId = zoneResults.getLong(1); long zoneId = zoneResults.getLong(1);
String networkType = zoneResults.getString(2); String networkType = zoneResults.getString(2);
@ -1438,12 +1438,13 @@ public class Upgrade410to420 implements DbUpgrade {
} }
} }
PreparedStatement fetchSRXNspStmt = boolean hasSrxNsp = false;
try (PreparedStatement fetchSRXNspStmt =
conn.prepareStatement("SELECT id from `cloud`.`physical_network_service_providers` where physical_network_id=" + physicalNetworkId + conn.prepareStatement("SELECT id from `cloud`.`physical_network_service_providers` where physical_network_id=" + physicalNetworkId +
" and provider_name = 'JuniperSRX'"); " and provider_name = 'JuniperSRX'");
ResultSet rsSRXNSP = fetchSRXNspStmt.executeQuery(); ResultSet rsSRXNSP = fetchSRXNspStmt.executeQuery();) {
boolean hasSrxNsp = rsSRXNSP.next(); hasSrxNsp = rsSRXNSP.next();
fetchSRXNspStmt.close(); }
// if there is no 'JuniperSRX' physical network service provider added into physical network then // if there is no 'JuniperSRX' physical network service provider added into physical network then
// add 'JuniperSRX' as network service provider and add the entry in 'external_firewall_devices' // add 'JuniperSRX' as network service provider and add the entry in 'external_firewall_devices'
@ -1466,24 +1467,8 @@ public class Upgrade410to420 implements DbUpgrade {
// not the network service provider has been provisioned in to physical network, mark all guest network // not the network service provider has been provisioned in to physical network, mark all guest network
// to be using network offering 'Isolated with external providers' // to be using network offering 'Isolated with external providers'
fixZoneUsingExternalDevices(conn); fixZoneUsingExternalDevices(conn);
if (zoneResults != null) {
try {
zoneResults.close();
} catch (SQLException e) {
}
}
if (zoneSearchStmt != null) {
try {
zoneSearchStmt.close();
} catch (SQLException e) {
}
}
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Exception while adding PhysicalNetworks", e); throw new CloudRuntimeException("Exception while adding PhysicalNetworks", e);
} finally {
} }
} }
@ -1762,39 +1747,37 @@ public class Upgrade410to420 implements DbUpgrade {
// migrate secondary storages NFS from host tables to image_store table // migrate secondary storages NFS from host tables to image_store table
private void migrateSecondaryStorageToImageStore(Connection conn) { private void migrateSecondaryStorageToImageStore(Connection conn) {
PreparedStatement storeInsert = null; String sqlSelectS3Count = "select count(*) from `cloud`.`s3`";
PreparedStatement storeDetailInsert = null; String sqlSelectSwiftCount = "select count(*) from `cloud`.`swift`";
PreparedStatement nfsQuery = null; String sqlInsertStoreDetail = "INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)";
PreparedStatement pstmt = null; String sqlUpdateHostAsRemoved = "UPDATE `cloud`.`host` SET removed = now() WHERE type = 'SecondaryStorage' and removed is null";
ResultSet rs = null;
ResultSet storeInfo = null;
s_logger.debug("Migrating secondary storage to image store"); s_logger.debug("Migrating secondary storage to image store");
boolean hasS3orSwift = false; boolean hasS3orSwift = false;
try { try (
PreparedStatement pstmtSelectS3Count = conn.prepareStatement(sqlSelectS3Count);
PreparedStatement pstmtSelectSwiftCount = conn.prepareStatement(sqlSelectSwiftCount);
PreparedStatement storeDetailInsert = conn.prepareStatement(sqlInsertStoreDetail);
PreparedStatement storeInsert =
conn.prepareStatement("INSERT INTO `cloud`.`image_store` (id, uuid, name, image_provider_name, protocol, url, data_center_id, scope, role, parent, total_size, created) values(?, ?, ?, 'NFS', 'nfs', ?, ?, 'ZONE', ?, ?, ?, ?)");
PreparedStatement nfsQuery =
conn.prepareStatement("select id, uuid, url, data_center_id, parent, total_size, created from `cloud`.`host` where type = 'SecondaryStorage' and removed is null");
PreparedStatement pstmtUpdateHostAsRemoved = conn.prepareStatement(sqlUpdateHostAsRemoved);
ResultSet rsSelectS3Count = pstmtSelectS3Count.executeQuery();
ResultSet rsSelectSwiftCount = pstmtSelectSwiftCount.executeQuery();
ResultSet rsNfs = nfsQuery.executeQuery();
) {
s_logger.debug("Checking if we need to migrate NFS secondary storage to image store or staging store"); s_logger.debug("Checking if we need to migrate NFS secondary storage to image store or staging store");
int numRows = 0; int numRows = 0;
pstmt = conn.prepareStatement("select count(*) from `cloud`.`s3`"); if (rsSelectS3Count.next()) {
rs = pstmt.executeQuery(); numRows = rsSelectS3Count.getInt(1);
if (rs.next()) { }
numRows = rs.getInt(1); // check if there is swift storage
if (rsSelectSwiftCount.next()) {
numRows += rsSelectSwiftCount.getInt(1);
} }
rs.close();
pstmt.close();
if (numRows > 0) { if (numRows > 0) {
hasS3orSwift = true; hasS3orSwift = true;
} else {
// check if there is swift storage
pstmt = conn.prepareStatement("select count(*) from `cloud`.`swift`");
rs = pstmt.executeQuery();
if (rs.next()) {
numRows = rs.getInt(1);
}
rs.close();
pstmt.close();
if (numRows > 0) {
hasS3orSwift = true;
}
} }
String store_role = "Image"; String store_role = "Image";
@ -1804,23 +1787,15 @@ public class Upgrade410to420 implements DbUpgrade {
s_logger.debug("Migrating NFS secondary storage to " + store_role + " store"); s_logger.debug("Migrating NFS secondary storage to " + store_role + " store");
storeDetailInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)");
// migrate NFS secondary storage, for nfs, keep previous host_id as the store_id // migrate NFS secondary storage, for nfs, keep previous host_id as the store_id
storeInsert = while (rsNfs.next()) {
conn.prepareStatement("INSERT INTO `cloud`.`image_store` (id, uuid, name, image_provider_name, protocol, url, data_center_id, scope, role, parent, total_size, created) values(?, ?, ?, 'NFS', 'nfs', ?, ?, 'ZONE', ?, ?, ?, ?)"); Long nfs_id = rsNfs.getLong("id");
nfsQuery = String nfs_uuid = rsNfs.getString("uuid");
conn.prepareStatement("select id, uuid, url, data_center_id, parent, total_size, created from `cloud`.`host` where type = 'SecondaryStorage' and removed is null"); String nfs_url = rsNfs.getString("url");
rs = nfsQuery.executeQuery(); String nfs_parent = rsNfs.getString("parent");
int nfs_dcid = rsNfs.getInt("data_center_id");
while (rs.next()) { Long nfs_totalsize = rsNfs.getObject("total_size") != null ? rsNfs.getLong("total_size") : null;
Long nfs_id = rs.getLong("id"); Date nfs_created = rsNfs.getDate("created");
String nfs_uuid = rs.getString("uuid");
String nfs_url = rs.getString("url");
String nfs_parent = rs.getString("parent");
int nfs_dcid = rs.getInt("data_center_id");
Long nfs_totalsize = rs.getObject("total_size") != null ? rs.getLong("total_size") : null;
Date nfs_created = rs.getDate("created");
// insert entry in image_store table and image_store_details // insert entry in image_store table and image_store_details
// table and store host_id and store_id mapping // table and store host_id and store_id mapping
@ -1841,36 +1816,11 @@ public class Upgrade410to420 implements DbUpgrade {
} }
s_logger.debug("Marking NFS secondary storage in host table as removed"); s_logger.debug("Marking NFS secondary storage in host table as removed");
pstmt = conn.prepareStatement("UPDATE `cloud`.`host` SET removed = now() WHERE type = 'SecondaryStorage' and removed is null"); pstmtUpdateHostAsRemoved.executeUpdate();
pstmt.executeUpdate();
pstmt.close();
} catch (SQLException e) { } catch (SQLException e) {
String msg = "Unable to migrate secondary storages." + e.getMessage(); String msg = "Unable to migrate secondary storages." + e.getMessage();
s_logger.error(msg); s_logger.error(msg);
throw new CloudRuntimeException(msg, e); throw new CloudRuntimeException(msg, e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (storeInfo != null) {
storeInfo.close();
}
if (storeInsert != null) {
storeInsert.close();
}
if (storeDetailInsert != null) {
storeDetailInsert.close();
}
if (nfsQuery != null) {
nfsQuery.close();
}
if (pstmt != null) {
pstmt.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Completed migrating secondary storage to image store"); s_logger.debug("Completed migrating secondary storage to image store");
} }
@ -1947,26 +1897,21 @@ public class Upgrade410to420 implements DbUpgrade {
// migrate secondary storages S3 from s3 tables to image_store table // migrate secondary storages S3 from s3 tables to image_store table
private void migrateS3ToImageStore(Connection conn) { private void migrateS3ToImageStore(Connection conn) {
PreparedStatement storeInsert = null;
PreparedStatement storeDetailInsert = null;
PreparedStatement storeQuery = null;
PreparedStatement s3Query = null;
ResultSet rs = null;
ResultSet storeInfo = null;
Long storeId = null; Long storeId = null;
Map<Long, Long> s3_store_id_map = new HashMap<Long, Long>(); Map<Long, Long> s3_store_id_map = new HashMap<Long, Long>();
s_logger.debug("Migrating S3 to image store"); s_logger.debug("Migrating S3 to image store");
try { try (
storeQuery = conn.prepareStatement("select id from `cloud`.`image_store` where uuid = ?"); PreparedStatement storeQuery = conn.prepareStatement("select id from `cloud`.`image_store` where uuid = ?");
storeDetailInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)"); PreparedStatement storeDetailInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)");
// migrate S3 to image_store // migrate S3 to image_store
storeInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store` (uuid, name, image_provider_name, protocol, scope, role, created) " + PreparedStatement storeInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store` (uuid, name, image_provider_name, protocol, scope, role, created) " +
"values(?, ?, 'S3', ?, 'REGION', 'Image', ?)"); "values(?, ?, 'S3', ?, 'REGION', 'Image', ?)");
s3Query = conn.prepareStatement("select id, uuid, access_key, secret_key, end_point, bucket, https, connection_timeout, " + PreparedStatement s3Query = conn.prepareStatement("select id, uuid, access_key, secret_key, end_point, bucket, https, connection_timeout, " +
"max_error_retry, socket_timeout, created from `cloud`.`s3`"); "max_error_retry, socket_timeout, created from `cloud`.`s3`");
rs = s3Query.executeQuery(); ResultSet rs = s3Query.executeQuery();
) {
while (rs.next()) { while (rs.next()) {
Long s3_id = rs.getLong("id"); Long s3_id = rs.getLong("id");
@ -1991,9 +1936,10 @@ public class Upgrade410to420 implements DbUpgrade {
storeInsert.executeUpdate(); storeInsert.executeUpdate();
storeQuery.setString(1, s3_uuid); storeQuery.setString(1, s3_uuid);
storeInfo = storeQuery.executeQuery(); try (ResultSet storeInfo = storeQuery.executeQuery();) {
if (storeInfo.next()) { if (storeInfo.next()) {
storeId = storeInfo.getLong("id"); storeId = storeInfo.getLong("id");
}
} }
Map<String, String> detailMap = new HashMap<String, String>(); Map<String, String> detailMap = new HashMap<String, String>();
@ -2027,29 +1973,6 @@ public class Upgrade410to420 implements DbUpgrade {
String msg = "Unable to migrate S3 secondary storages." + e.getMessage(); String msg = "Unable to migrate S3 secondary storages." + e.getMessage();
s_logger.error(msg); s_logger.error(msg);
throw new CloudRuntimeException(msg, e); throw new CloudRuntimeException(msg, e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (storeInfo != null) {
storeInfo.close();
}
if (storeInsert != null) {
storeInsert.close();
}
if (storeDetailInsert != null) {
storeDetailInsert.close();
}
if (storeQuery != null) {
storeQuery.close();
}
if (s3Query != null) {
s3Query.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Migrating template_s3_ref to template_store_ref"); s_logger.debug("Migrating template_s3_ref to template_store_ref");
@ -2162,26 +2085,20 @@ public class Upgrade410to420 implements DbUpgrade {
// migrate secondary storages Swift from swift tables to image_store table // migrate secondary storages Swift from swift tables to image_store table
private void migrateSwiftToImageStore(Connection conn) { private void migrateSwiftToImageStore(Connection conn) {
PreparedStatement storeInsert = null;
PreparedStatement storeDetailInsert = null;
PreparedStatement storeQuery = null;
PreparedStatement swiftQuery = null;
ResultSet rs = null;
ResultSet storeInfo = null;
Long storeId = null; Long storeId = null;
Map<Long, Long> swift_store_id_map = new HashMap<Long, Long>(); Map<Long, Long> swift_store_id_map = new HashMap<Long, Long>();
s_logger.debug("Migrating Swift to image store"); s_logger.debug("Migrating Swift to image store");
try { try (
storeQuery = conn.prepareStatement("select id from `cloud`.`image_store` where uuid = ?"); PreparedStatement storeQuery = conn.prepareStatement("select id from `cloud`.`image_store` where uuid = ?");
storeDetailInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)"); PreparedStatement storeDetailInsert = conn.prepareStatement("INSERT INTO `cloud`.`image_store_details` (store_id, name, value) values(?, ?, ?)");
// migrate SWIFT secondary storage // migrate SWIFT secondary storage
storeInsert = PreparedStatement storeInsert =
conn.prepareStatement("INSERT INTO `cloud`.`image_store` (uuid, name, image_provider_name, protocol, url, scope, role, created) values(?, ?, 'Swift', 'http', ?, 'REGION', 'Image', ?)"); conn.prepareStatement("INSERT INTO `cloud`.`image_store` (uuid, name, image_provider_name, protocol, url, scope, role, created) values(?, ?, 'Swift', 'http', ?, 'REGION', 'Image', ?)");
swiftQuery = conn.prepareStatement("select id, uuid, url, account, username, swift.key, created from `cloud`.`swift`"); PreparedStatement swiftQuery = conn.prepareStatement("select id, uuid, url, account, username, swift.key, created from `cloud`.`swift`");
rs = swiftQuery.executeQuery(); ResultSet rs = swiftQuery.executeQuery();
) {
while (rs.next()) { while (rs.next()) {
Long swift_id = rs.getLong("id"); Long swift_id = rs.getLong("id");
String swift_uuid = rs.getString("uuid"); String swift_uuid = rs.getString("uuid");
@ -2200,9 +2117,10 @@ public class Upgrade410to420 implements DbUpgrade {
storeInsert.executeUpdate(); storeInsert.executeUpdate();
storeQuery.setString(1, swift_uuid); storeQuery.setString(1, swift_uuid);
storeInfo = storeQuery.executeQuery(); try (ResultSet storeInfo = storeQuery.executeQuery();) {
if (storeInfo.next()) { if (storeInfo.next()) {
storeId = storeInfo.getLong("id"); storeId = storeInfo.getLong("id");
}
} }
Map<String, String> detailMap = new HashMap<String, String>(); Map<String, String> detailMap = new HashMap<String, String>();
@ -2225,29 +2143,6 @@ public class Upgrade410to420 implements DbUpgrade {
String msg = "Unable to migrate swift secondary storages." + e.getMessage(); String msg = "Unable to migrate swift secondary storages." + e.getMessage();
s_logger.error(msg); s_logger.error(msg);
throw new CloudRuntimeException(msg, e); throw new CloudRuntimeException(msg, e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (storeInfo != null) {
storeInfo.close();
}
if (storeInsert != null) {
storeInsert.close();
}
if (storeDetailInsert != null) {
storeDetailInsert.close();
}
if (storeQuery != null) {
storeQuery.close();
}
if (swiftQuery != null) {
swiftQuery.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Migrating template_swift_ref to template_store_ref"); s_logger.debug("Migrating template_swift_ref to template_store_ref");
@ -2261,16 +2156,13 @@ public class Upgrade410to420 implements DbUpgrade {
// migrate template_s3_ref to template_store_ref // migrate template_s3_ref to template_store_ref
private void migrateTemplateSwiftRef(Connection conn, Map<Long, Long> swiftStoreMap) { private void migrateTemplateSwiftRef(Connection conn, Map<Long, Long> swiftStoreMap) {
PreparedStatement tmplStoreInsert = null;
PreparedStatement s3Query = null;
ResultSet rs = null;
s_logger.debug("Updating template_store_ref table from template_swift_ref table"); s_logger.debug("Updating template_store_ref table from template_swift_ref table");
try { try (
tmplStoreInsert = PreparedStatement tmplStoreInsert =
conn.prepareStatement("INSERT INTO `cloud`.`template_store_ref` (store_id, template_id, created, download_pct, size, physical_size, download_state, local_path, install_path, update_count, ref_cnt, store_role, state) values(?, ?, ?, 100, ?, ?, 'DOWNLOADED', '?', '?', 0, 0, 'Image', 'Ready')"); conn.prepareStatement("INSERT INTO `cloud`.`template_store_ref` (store_id, template_id, created, download_pct, size, physical_size, download_state, local_path, install_path, update_count, ref_cnt, store_role, state) values(?, ?, ?, 100, ?, ?, 'DOWNLOADED', '?', '?', 0, 0, 'Image', 'Ready')");
s3Query = conn.prepareStatement("select swift_id, template_id, created, path, size, physical_size from `cloud`.`template_swift_ref`"); PreparedStatement s3Query = conn.prepareStatement("select swift_id, template_id, created, path, size, physical_size from `cloud`.`template_swift_ref`");
rs = s3Query.executeQuery(); ResultSet rs = s3Query.executeQuery();
) {
while (rs.next()) { while (rs.next()) {
Long swift_id = rs.getLong("swift_id"); Long swift_id = rs.getLong("swift_id");
Long tmpl_id = rs.getLong("template_id"); Long tmpl_id = rs.getLong("template_id");
@ -2300,19 +2192,6 @@ public class Upgrade410to420 implements DbUpgrade {
String msg = "Unable to migrate template_swift_ref." + e.getMessage(); String msg = "Unable to migrate template_swift_ref." + e.getMessage();
s_logger.error(msg); s_logger.error(msg);
throw new CloudRuntimeException(msg, e); throw new CloudRuntimeException(msg, e);
} finally {
try {
if (rs != null) {
rs.close();
}
if (tmplStoreInsert != null) {
tmplStoreInsert.close();
}
if (s3Query != null) {
s3Query.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Completed migrating template_swift_ref table."); s_logger.debug("Completed migrating template_swift_ref table.");
} }
@ -2575,10 +2454,10 @@ public class Upgrade410to420 implements DbUpgrade {
private void upgradeResourceCount(Connection conn) { private void upgradeResourceCount(Connection conn) {
s_logger.debug("upgradeResourceCount start"); s_logger.debug("upgradeResourceCount start");
ResultSet rsAccount = null; try(
try( PreparedStatement sel_dom_pstmt = conn.prepareStatement("select id, domain_id FROM `cloud`.`account` where removed is NULL ");) PreparedStatement sel_dom_pstmt = conn.prepareStatement("select id, domain_id FROM `cloud`.`account` where removed is NULL ");
{ ResultSet rsAccount = sel_dom_pstmt.executeQuery();
rsAccount = sel_dom_pstmt.executeQuery(); ) {
while (rsAccount.next()) { while (rsAccount.next()) {
long account_id = rsAccount.getLong(1); long account_id = rsAccount.getLong(1);
long domain_id = rsAccount.getLong(2); long domain_id = rsAccount.getLong(2);
@ -2706,13 +2585,6 @@ public class Upgrade410to420 implements DbUpgrade {
s_logger.debug("upgradeResourceCount finish"); s_logger.debug("upgradeResourceCount finish");
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Unable to upgrade resource count (cpu,memory,primary_storage,secondary_storage) ", e); throw new CloudRuntimeException("Unable to upgrade resource count (cpu,memory,primary_storage,secondary_storage) ", e);
} finally {
try {
if (rsAccount != null) {
rsAccount.close();
}
} catch (SQLException e) {
}
} }
} }

View File

@ -131,80 +131,84 @@ public class Upgrade420to421 implements DbUpgrade {
private void upgradeResourceCount(Connection conn) { private void upgradeResourceCount(Connection conn) {
s_logger.debug("upgradeResourceCount start"); s_logger.debug("upgradeResourceCount start");
PreparedStatement pstmt1 = null; String sqlSelectAccountIds = "select id, domain_id FROM `cloud`.`account` where removed is NULL ";
PreparedStatement pstmt2 = null; String sqlSelectOfferingTotals = "SELECT SUM(service_offering.cpu), SUM(service_offering.ram_size)"
PreparedStatement pstmt3 = null; + " FROM `cloud`.`vm_instance`, `cloud`.`service_offering`"
PreparedStatement pstmt4 = null; + " WHERE vm_instance.service_offering_id = service_offering.id AND vm_instance.account_id = ?"
PreparedStatement pstmt5 = null; + " AND vm_instance.removed is NULL"
ResultSet rsAccount = null; + " AND vm_instance.vm_type='User' AND state not in ('Destroyed', 'Error', 'Expunging')";
ResultSet rsCount = null; String sqlSelectTotalVolumeSize =
try { "SELECT sum(size) FROM `cloud`.`volumes` WHERE account_id= ?"
pstmt1 = conn.prepareStatement("select id, domain_id FROM `cloud`.`account` where removed is NULL "); + " AND (path is not NULL OR state in ('Allocated')) AND removed is NULL"
rsAccount = pstmt1.executeQuery(); + " AND instance_id IN (SELECT id FROM `cloud`.`vm_instance` WHERE vm_type='User')";
String sqlSelectTotalPathlessVolumeSize =
"SELECT sum(size) FROM `cloud`.`volumes` WHERE account_id= ?"
+ " AND path is NULL AND state not in ('Allocated') AND removed is NULL";
String sqlSelectTotalSnapshotSize = "SELECT sum(size) FROM `cloud`.`snapshots` WHERE account_id= ? AND removed is NULL";
String sqlSelectTotalTemplateStoreSize = "SELECT sum(template_store_ref.size) FROM `cloud`.`template_store_ref`,`cloud`.`vm_template` WHERE account_id = ?"
+ " AND template_store_ref.template_id = vm_template.id AND download_state = 'DOWNLOADED' AND destroyed = false AND removed is NULL";
String sqlSelectDomainIds = "select id FROM `cloud`.`domain`";
String sqlSelectAccountCount = "select account.domain_id,sum(resource_count.count) from `cloud`.`account` left join `cloud`.`resource_count` on account.id=resource_count.account_id "
+ "where resource_count.type=? group by account.domain_id;";
try (
PreparedStatement pstmtSelectAccountIds = conn.prepareStatement(sqlSelectAccountIds);
PreparedStatement pstmtSelectOfferingTotals = conn.prepareStatement(sqlSelectOfferingTotals);
PreparedStatement pstmtSelectTotalVolumeSize = conn.prepareStatement(sqlSelectTotalVolumeSize);
PreparedStatement pstmtSelectTotalPathlessVolumeSize = conn.prepareStatement(sqlSelectTotalPathlessVolumeSize);
PreparedStatement pstmtSelectTotalSnapshotSize = conn.prepareStatement(sqlSelectTotalSnapshotSize);
PreparedStatement pstmtSelectTotalTemplateStoreSize = conn.prepareStatement(sqlSelectTotalTemplateStoreSize);
PreparedStatement pstmtSelectDomainIds = conn.prepareStatement(sqlSelectDomainIds);
PreparedStatement pstmtSelectAccountCount = conn.prepareStatement(sqlSelectAccountCount);
ResultSet rsAccount = pstmtSelectAccountIds.executeQuery();
) {
while (rsAccount.next()) { while (rsAccount.next()) {
long account_id = rsAccount.getLong(1); long account_id = rsAccount.getLong(1);
long domain_id = rsAccount.getLong(2); long domain_id = rsAccount.getLong(2);
// 1. update cpu,memory for all accounts // 1. update cpu,memory for all accounts
pstmt2 = pstmtSelectOfferingTotals.setLong(1, account_id);
conn.prepareStatement("SELECT SUM(service_offering.cpu), SUM(service_offering.ram_size)" + " FROM `cloud`.`vm_instance`, `cloud`.`service_offering`" try (ResultSet rsOfferingTotals = pstmtSelectOfferingTotals.executeQuery();) {
+ " WHERE vm_instance.service_offering_id = service_offering.id AND vm_instance.account_id = ?" + " AND vm_instance.removed is NULL" if (rsOfferingTotals.next()) {
+ " AND vm_instance.vm_type='User' AND state not in ('Destroyed', 'Error', 'Expunging')"); upgradeResourceCountforAccount(conn, account_id, domain_id, "cpu", rsOfferingTotals.getLong(1));
pstmt2.setLong(1, account_id); upgradeResourceCountforAccount(conn, account_id, domain_id, "memory", rsOfferingTotals.getLong(2));
rsCount = pstmt2.executeQuery(); } else {
if (rsCount.next()) { upgradeResourceCountforAccount(conn, account_id, domain_id, "cpu", 0L);
upgradeResourceCountforAccount(conn, account_id, domain_id, "cpu", rsCount.getLong(1)); upgradeResourceCountforAccount(conn, account_id, domain_id, "memory", 0L);
upgradeResourceCountforAccount(conn, account_id, domain_id, "memory", rsCount.getLong(2)); }
} else {
upgradeResourceCountforAccount(conn, account_id, domain_id, "cpu", 0L);
upgradeResourceCountforAccount(conn, account_id, domain_id, "memory", 0L);
} }
rsCount.close();
// 2. update primary_storage for all accounts // 2. update primary_storage for all accounts
pstmt3 = pstmtSelectTotalVolumeSize.setLong(1, account_id);
conn.prepareStatement("SELECT sum(size) FROM `cloud`.`volumes` WHERE account_id= ?" try (ResultSet rsTotalVolumeSize = pstmtSelectTotalVolumeSize.executeQuery();) {
+ " AND (path is not NULL OR state in ('Allocated')) AND removed is NULL" if (rsTotalVolumeSize.next()) {
+ " AND instance_id IN (SELECT id FROM `cloud`.`vm_instance` WHERE vm_type='User')"); upgradeResourceCountforAccount(conn, account_id, domain_id, "primary_storage", rsTotalVolumeSize.getLong(1));
pstmt3.setLong(1, account_id); } else {
rsCount = pstmt3.executeQuery(); upgradeResourceCountforAccount(conn, account_id, domain_id, "primary_storage", 0L);
if (rsCount.next()) { }
upgradeResourceCountforAccount(conn, account_id, domain_id, "primary_storage", rsCount.getLong(1));
} else {
upgradeResourceCountforAccount(conn, account_id, domain_id, "primary_storage", 0L);
} }
rsCount.close();
// 3. update secondary_storage for all accounts // 3. update secondary_storage for all accounts
long totalVolumesSize = 0; long totalVolumesSize = 0;
long totalSnapshotsSize = 0; long totalSnapshotsSize = 0;
long totalTemplatesSize = 0; long totalTemplatesSize = 0;
pstmt4 = pstmtSelectTotalPathlessVolumeSize.setLong(1, account_id);
conn.prepareStatement("SELECT sum(size) FROM `cloud`.`volumes` WHERE account_id= ?" try (ResultSet rsTotalPathlessVolumeSize = pstmtSelectTotalPathlessVolumeSize.executeQuery();) {
+ " AND path is NULL AND state not in ('Allocated') AND removed is NULL"); if (rsTotalPathlessVolumeSize.next()) {
pstmt4.setLong(1, account_id); totalVolumesSize = rsTotalPathlessVolumeSize.getLong(1);
rsCount = pstmt4.executeQuery(); }
if (rsCount.next()) {
totalVolumesSize = rsCount.getLong(1);
} }
rsCount.close();
pstmt4.close();
pstmt4 = conn.prepareStatement("SELECT sum(size) FROM `cloud`.`snapshots` WHERE account_id= ? AND removed is NULL"); pstmtSelectTotalSnapshotSize.setLong(1, account_id);
pstmt4.setLong(1, account_id); try (ResultSet rsTotalSnapshotSize = pstmtSelectTotalSnapshotSize.executeQuery();) {
rsCount = pstmt4.executeQuery(); if (rsTotalSnapshotSize.next()) {
if (rsCount.next()) { totalSnapshotsSize = rsTotalSnapshotSize.getLong(1);
totalSnapshotsSize = rsCount.getLong(1); }
} }
rsCount.close(); pstmtSelectTotalTemplateStoreSize.setLong(1, account_id);
pstmt4.close(); try (ResultSet rsTotalTemplateStoreSize = pstmtSelectTotalTemplateStoreSize.executeQuery();) {
if (rsTotalTemplateStoreSize.next()) {
pstmt4 = totalTemplatesSize = rsTotalTemplateStoreSize.getLong(1);
conn.prepareStatement("SELECT sum(template_store_ref.size) FROM `cloud`.`template_store_ref`,`cloud`.`vm_template` WHERE account_id = ?" }
+ " AND template_store_ref.template_id = vm_template.id AND download_state = 'DOWNLOADED' AND destroyed = false AND removed is NULL");
pstmt4.setLong(1, account_id);
rsCount = pstmt4.executeQuery();
if (rsCount.next()) {
totalTemplatesSize = rsCount.getLong(1);
} }
upgradeResourceCountforAccount(conn, account_id, domain_id, "secondary_storage", totalVolumesSize + totalSnapshotsSize + totalTemplatesSize); upgradeResourceCountforAccount(conn, account_id, domain_id, "secondary_storage", totalVolumesSize + totalSnapshotsSize + totalTemplatesSize);
} }
@ -212,56 +216,29 @@ public class Upgrade420to421 implements DbUpgrade {
// 4. upgrade cpu,memory,primary_storage,secondary_storage for domains // 4. upgrade cpu,memory,primary_storage,secondary_storage for domains
String resource_types[] = {"cpu", "memory", "primary_storage", "secondary_storage"}; String resource_types[] = {"cpu", "memory", "primary_storage", "secondary_storage"};
pstmt5 = conn.prepareStatement("select id FROM `cloud`.`domain`"); try (ResultSet rsDomainIds = pstmtSelectDomainIds.executeQuery();) {
rsAccount = pstmt5.executeQuery(); while (rsDomainIds.next()) {
while (rsAccount.next()) { long domain_id = rsDomainIds.getLong(1);
long domain_id = rsAccount.getLong(1); for (int count = 0; count < resource_types.length; count++) {
for (int count = 0; count < resource_types.length; count++) { String resource_type = resource_types[count];
String resource_type = resource_types[count]; upgradeResourceCountforDomain(conn, domain_id, resource_type, 0L); // reset value to 0 before statistics
upgradeResourceCountforDomain(conn, domain_id, resource_type, 0L); // reset value to 0 before statistics }
} }
} }
for (int count = 0; count < resource_types.length; count++) { for (int count = 0; count < resource_types.length; count++) {
String resource_type = resource_types[count]; String resource_type = resource_types[count];
pstmt5 = pstmtSelectAccountCount.setString(1, resource_type);
conn.prepareStatement("select account.domain_id,sum(resource_count.count) from `cloud`.`account` left join `cloud`.`resource_count` on account.id=resource_count.account_id " try (ResultSet rsAccountCount = pstmtSelectAccountCount.executeQuery();) {
+ "where resource_count.type=? group by account.domain_id;"); while (rsAccountCount.next()) {
pstmt5.setString(1, resource_type); long domain_id = rsAccountCount.getLong(1);
rsCount = pstmt5.executeQuery(); long resource_count = rsAccountCount.getLong(2);
while (rsCount.next()) { upgradeResourceCountforDomain(conn, domain_id, resource_type, resource_count);
long domain_id = rsCount.getLong(1); }
long resource_count = rsCount.getLong(2);
upgradeResourceCountforDomain(conn, domain_id, resource_type, resource_count);
} }
} }
s_logger.debug("upgradeResourceCount finish"); s_logger.debug("upgradeResourceCount finish");
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Unable to upgrade resource count (cpu,memory,primary_storage,secondary_storage) ", e); throw new CloudRuntimeException("Unable to upgrade resource count (cpu,memory,primary_storage,secondary_storage) ", e);
} finally {
try {
if (rsAccount != null) {
rsAccount.close();
}
if (rsCount != null) {
rsCount.close();
}
if (pstmt1 != null) {
pstmt1.close();
}
if (pstmt2 != null) {
pstmt2.close();
}
if (pstmt3 != null) {
pstmt3.close();
}
if (pstmt4 != null) {
pstmt4.close();
}
if (pstmt5 != null) {
pstmt5.close();
}
} catch (SQLException e) {
}
} }
} }

View File

@ -32,6 +32,7 @@ import java.util.Set;
import com.cloud.hypervisor.Hypervisor; import com.cloud.hypervisor.Hypervisor;
import com.cloud.utils.crypt.DBEncryptionUtil; import com.cloud.utils.crypt.DBEncryptionUtil;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.exception.CloudRuntimeException;
@ -76,31 +77,20 @@ public class Upgrade442to450 implements DbUpgrade {
} }
private void updateMaxRouterSizeConfig(Connection conn) { private void updateMaxRouterSizeConfig(Connection conn) {
PreparedStatement updatePstmt = null; String sqlUpdateConfig = "UPDATE `cloud`.`configuration` SET value=? WHERE name='router.ram.size' AND category='Hidden'";
try { try (PreparedStatement updatePstmt = conn.prepareStatement(sqlUpdateConfig);){
String encryptedValue = DBEncryptionUtil.encrypt("256"); String encryptedValue = DBEncryptionUtil.encrypt("256");
updatePstmt = conn.prepareStatement("UPDATE `cloud`.`configuration` SET value=? WHERE name='router.ram.size' AND category='Hidden'");
updatePstmt.setBytes(1, encryptedValue.getBytes("UTF-8")); updatePstmt.setBytes(1, encryptedValue.getBytes("UTF-8"));
updatePstmt.executeUpdate(); updatePstmt.executeUpdate();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Unable to upgrade max ram size of router in config.", e); throw new CloudRuntimeException("Unable to upgrade max ram size of router in config.", e);
} catch (UnsupportedEncodingException e) { } catch (UnsupportedEncodingException e) {
throw new CloudRuntimeException("Unable encrypt configuration values ", e); throw new CloudRuntimeException("Unable encrypt configuration values ", e);
} finally {
try {
if (updatePstmt != null) {
updatePstmt.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Done updating router.ram.size config to 256"); s_logger.debug("Done updating router.ram.size config to 256");
} }
private void upgradeMemoryOfVirtualRoutervmOffering(Connection conn) { private void upgradeMemoryOfVirtualRoutervmOffering(Connection conn) {
PreparedStatement updatePstmt = null;
PreparedStatement selectPstmt = null;
ResultSet selectResultSet = null;
int newRamSize = 256; //256MB int newRamSize = 256; //256MB
long serviceOfferingId = 0; long serviceOfferingId = 0;
@ -109,10 +99,11 @@ public class Upgrade442to450 implements DbUpgrade {
* We should not update/modify any user-defined offering. * We should not update/modify any user-defined offering.
*/ */
try { try (
selectPstmt = conn.prepareStatement("SELECT id FROM `cloud`.`service_offering` WHERE vm_type='domainrouter'"); PreparedStatement selectPstmt = conn.prepareStatement("SELECT id FROM `cloud`.`service_offering` WHERE vm_type='domainrouter'");
updatePstmt = conn.prepareStatement("UPDATE `cloud`.`service_offering` SET ram_size=? WHERE id=?"); PreparedStatement updatePstmt = conn.prepareStatement("UPDATE `cloud`.`service_offering` SET ram_size=? WHERE id=?");
selectResultSet = selectPstmt.executeQuery(); ResultSet selectResultSet = selectPstmt.executeQuery();
) {
if(selectResultSet.next()) { if(selectResultSet.next()) {
serviceOfferingId = selectResultSet.getLong("id"); serviceOfferingId = selectResultSet.getLong("id");
} }
@ -122,19 +113,6 @@ public class Upgrade442to450 implements DbUpgrade {
updatePstmt.executeUpdate(); updatePstmt.executeUpdate();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Unable to upgrade ram_size of service offering for domain router. ", e); throw new CloudRuntimeException("Unable to upgrade ram_size of service offering for domain router. ", e);
} finally {
try {
if (selectPstmt != null) {
selectPstmt.close();
}
if (selectResultSet != null) {
selectResultSet.close();
}
if (updatePstmt != null) {
updatePstmt.close();
}
} catch (SQLException e) {
}
} }
s_logger.debug("Done upgrading RAM for service offering of domain router to " + newRamSize); s_logger.debug("Done upgrading RAM for service offering of domain router to " + newRamSize);
} }

View File

@ -134,15 +134,4 @@ public class ClusterServiceServletImpl implements ClusterService {
return s_client; return s_client;
} }
// for test purpose only
public static void main(final String[] args) {
/*
ClusterServiceServletImpl service = new ClusterServiceServletImpl("http://localhost:9090/clusterservice", 300);
try {
String result = service.execute("test", 1, "{ p1:v1, p2:v2 }", true);
System.out.println(result);
} catch (RemoteException e) {
}
*/
}
} }

View File

@ -142,26 +142,17 @@ public class ConnectionConcierge {
} }
protected String testValidity(String name, Connection conn) { protected String testValidity(String name, Connection conn) {
PreparedStatement pstmt = null; if (conn != null) {
try { synchronized (conn) {
if (conn != null) { try (PreparedStatement pstmt = conn.prepareStatement("SELECT 1");) {
synchronized (conn) {
pstmt = conn.prepareStatement("SELECT 1");
pstmt.executeQuery(); pstmt.executeQuery();
} } catch (Throwable th) {
} s_logger.error("Unable to keep the db connection for " + name, th);
return null; return th.toString();
} catch (Throwable th) {
s_logger.error("Unable to keep the db connection for " + name, th);
return th.toString();
} finally {
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
} }
} }
} }
return null;
} }
@Override @Override

View File

@ -43,6 +43,8 @@ import javax.persistence.Transient;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
public class DbUtil { public class DbUtil {
protected final static Logger s_logger = Logger.getLogger(DbUtil.class); protected final static Logger s_logger = Logger.getLogger(DbUtil.class);
@ -280,16 +282,4 @@ public class DbUtil {
closeAutoCloseable(connection, "exception while close connection."); closeAutoCloseable(connection, "exception while close connection.");
} }
public static void closeAutoCloseable(AutoCloseable ac, String message) {
try {
if (ac != null) {
ac.close();
}
} catch (Exception e) {
s_logger.warn("[ignored] " + message, e);
}
}
} }

View File

@ -38,32 +38,17 @@ public class TransactionTest {
@BeforeClass @BeforeClass
public static void oneTimeSetup() { public static void oneTimeSetup() {
Connection conn = null; try (
PreparedStatement pstmt = null; Connection conn = TransactionLegacy.getStandaloneConnection();
try { PreparedStatement pstmt =
conn = TransactionLegacy.getStandaloneConnection(); conn.prepareStatement("CREATE TABLE `cloud`.`test` (" + "`id` bigint unsigned NOT NULL UNIQUE AUTO_INCREMENT," + "`fld_int` int unsigned,"
+ "`fld_long` bigint unsigned," + "`fld_string` varchar(255)," + "PRIMARY KEY (`id`)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8;");
pstmt = ) {
conn.prepareStatement("CREATE TABLE `cloud`.`test` (" + "`id` bigint unsigned NOT NULL UNIQUE AUTO_INCREMENT," + "`fld_int` int unsigned,"
+ "`fld_long` bigint unsigned," + "`fld_string` varchar(255)," + "PRIMARY KEY (`id`)" + ") ENGINE=InnoDB DEFAULT CHARSET=utf8;");
pstmt.execute(); pstmt.execute();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Problem with sql", e); throw new CloudRuntimeException("Problem with sql", e);
} finally {
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
}
}
} }
} }
@ -157,57 +142,25 @@ public class TransactionTest {
* Delete all records after each test, but table is still kept * Delete all records after each test, but table is still kept
*/ */
public void tearDown() { public void tearDown() {
Connection conn = null; try (
PreparedStatement pstmt = null; Connection conn = TransactionLegacy.getStandaloneConnection();
try { PreparedStatement pstmt = conn.prepareStatement("truncate table `cloud`.`test`");
conn = TransactionLegacy.getStandaloneConnection(); ) {
pstmt = conn.prepareStatement("truncate table `cloud`.`test`");
pstmt.execute(); pstmt.execute();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Problem with sql", e); throw new CloudRuntimeException("Problem with sql", e);
} finally {
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
}
}
} }
} }
@AfterClass @AfterClass
public static void oneTimeTearDown() { public static void oneTimeTearDown() {
Connection conn = null; try (
PreparedStatement pstmt = null; Connection conn = TransactionLegacy.getStandaloneConnection();
try { PreparedStatement pstmt = conn.prepareStatement("DROP TABLE IF EXISTS `cloud`.`test`");
conn = TransactionLegacy.getStandaloneConnection(); ) {
pstmt = conn.prepareStatement("DROP TABLE IF EXISTS `cloud`.`test`");
pstmt.execute(); pstmt.execute();
} catch (SQLException e) { } catch (SQLException e) {
throw new CloudRuntimeException("Problem with sql", e); throw new CloudRuntimeException("Problem with sql", e);
} finally {
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
}
}
if (conn != null) {
try {
conn.close();
} catch (SQLException e) {
}
}
} }
} }
} }

View File

@ -125,6 +125,7 @@ public class OnwireClassRegistry {
} catch (IOException e) { } catch (IOException e) {
s_logger.debug("Encountered IOException", e); s_logger.debug("Encountered IOException", e);
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
s_logger.info("[ignored] class not found", e);
} }
return classes; return classes;
} }
@ -139,6 +140,7 @@ public class OnwireClassRegistry {
Class<?> clazz = Class.forName(name); Class<?> clazz = Class.forName(name);
classes.add(clazz); classes.add(clazz);
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
s_logger.info("[ignored] class not found in directory " + directory, e);
} catch (Exception e) { } catch (Exception e) {
s_logger.debug("Encountered unexpect exception! ", e); s_logger.debug("Encountered unexpect exception! ", e);
} }

View File

@ -1,58 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cloudstack.framework.codestyle;
import org.apache.cloudstack.framework.rpc.RpcCallbackDispatcher;
import org.apache.cloudstack.framework.rpc.RpcClientCall;
import org.apache.cloudstack.framework.rpc.RpcException;
import org.apache.cloudstack.framework.rpc.RpcIOException;
import org.apache.cloudstack.framework.rpc.RpcProvider;
import org.apache.cloudstack.framework.rpc.RpcTimeoutException;
public class ClientOnlyEventDrivenStyle {
RpcProvider _rpcProvider;
public void AsyncCallRpcService() {
String cmd = new String();
RpcCallbackDispatcher<ClientOnlyEventDrivenStyle> callbackDispatcher = RpcCallbackDispatcher.create(this);
callbackDispatcher.setCallback(callbackDispatcher.getTarget().OnAsyncCallRpcServiceCallback(null, null));
_rpcProvider.newCall("host-2")
.setCommand("TestCommand")
.setCommandArg(cmd)
.setTimeout(10000)
.setCallbackDispatcher(callbackDispatcher)
.setContext("Context Object")
// save context object for callback handler
.apply();
}
public Void OnAsyncCallRpcServiceCallback(RpcClientCall call, String context) {
try {
String answer = call.get();
} catch (RpcTimeoutException e) {
} catch (RpcIOException e) {
} catch (RpcException e) {
}
return null;
}
}

View File

@ -1,58 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.cloudstack.framework.codestyle;
import org.apache.cloudstack.framework.rpc.RpcCallbackListener;
import org.apache.cloudstack.framework.rpc.RpcClientCall;
import org.apache.cloudstack.framework.rpc.RpcException;
import org.apache.cloudstack.framework.rpc.RpcIOException;
import org.apache.cloudstack.framework.rpc.RpcProvider;
import org.apache.cloudstack.framework.rpc.RpcTimeoutException;
public class ClientOnlyListenerStyle {
RpcProvider _rpcProvider;
public void AsyncCallRpcService() {
String cmd = new String();
_rpcProvider.newCall("host-2").setCommand("TestCommand").setCommandArg(cmd).setTimeout(10000).addCallbackListener(new RpcCallbackListener<String>() {
@Override
public void onSuccess(String result) {
}
@Override
public void onFailure(RpcException e) {
}
}).apply();
}
public void SyncCallRpcService() {
String cmd = new String();
RpcClientCall call = _rpcProvider.newCall("host-2").setCommand("TestCommand").setCommandArg(cmd).setTimeout(10000).apply();
try {
String answer = call.get();
} catch (RpcTimeoutException e) {
} catch (RpcIOException e) {
} catch (RpcException e) {
}
}
}

View File

@ -103,16 +103,19 @@ public class ModuleBasedContextFactoryTest {
assertEquals(parent, parentBean); assertEquals(parent, parentBean);
} }
int notfound = 0;
for (String notThere : notTheres) { for (String notThere : notTheres) {
try { try {
context.getBean(notThere, String.class); context.getBean(notThere, String.class);
fail(); fail();
} catch (NoSuchBeanDefinitionException e) { } catch (NoSuchBeanDefinitionException e) {
notfound++;
} }
} }
int count = context.getBean("count", InstantiationCounter.class).getCount(); int count = context.getBean("count", InstantiationCounter.class).getCount();
assertEquals(notTheres.length, notfound);
assertEquals(order, count); assertEquals(order, count);
} }

View File

@ -2363,10 +2363,8 @@ public class HypervDirectConnectResource extends ServerResourceBase implements S
// VM patching/rebooting time that may need // VM patching/rebooting time that may need
int retry = _retry; int retry = _retry;
while (System.currentTimeMillis() - startTick <= _opsTimeout || --retry > 0) { while (System.currentTimeMillis() - startTick <= _opsTimeout || --retry > 0) {
SocketChannel sch = null; s_logger.info("Trying to connect to " + ipAddress);
try { try (SocketChannel sch = SocketChannel.open();) {
s_logger.info("Trying to connect to " + ipAddress);
sch = SocketChannel.open();
sch.configureBlocking(true); sch.configureBlocking(true);
sch.socket().setSoTimeout(5000); sch.socket().setSoTimeout(5000);
// we need to connect to the control ip address to check the status of the system vm // we need to connect to the control ip address to check the status of the system vm
@ -2385,13 +2383,6 @@ public class HypervDirectConnectResource extends ServerResourceBase implements S
s_logger.debug("[ignored] interupted while waiting to retry connecting to vm after exception: "+e.getLocalizedMessage()); s_logger.debug("[ignored] interupted while waiting to retry connecting to vm after exception: "+e.getLocalizedMessage());
} }
} }
} finally {
if (sch != null) {
try {
sch.close();
} catch (IOException e) {
}
}
} }
try { try {

View File

@ -136,6 +136,7 @@ public class AgentRoutingResource extends AgentStorageResource {
try { try {
clz = Class.forName(objectType); clz = Class.forName(objectType);
} catch (ClassNotFoundException e) { } catch (ClassNotFoundException e) {
s_logger.info("[ignored] ping returned class", e);
} }
if (clz != null) { if (clz != null) {
StringReader reader = new StringReader(objectData); StringReader reader = new StringReader(objectData);

View File

@ -18,6 +18,7 @@ package com.cloud.hypervisor.vmware.resource;
import java.io.File; import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.ConnectException; import java.net.ConnectException;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.net.URI; import java.net.URI;
@ -33,12 +34,10 @@ import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry;
import java.util.Random; import java.util.Random;
import java.util.Set; import java.util.Set;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.UUID; import java.util.UUID;
import java.io.UnsupportedEncodingException;
import javax.naming.ConfigurationException; import javax.naming.ConfigurationException;
@ -216,8 +215,8 @@ import com.cloud.dc.Vlan;
import com.cloud.exception.CloudException; import com.cloud.exception.CloudException;
import com.cloud.exception.InternalErrorException; import com.cloud.exception.InternalErrorException;
import com.cloud.host.Host.Type; import com.cloud.host.Host.Type;
import com.cloud.hypervisor.guru.VMwareGuru;
import com.cloud.hypervisor.Hypervisor.HypervisorType; import com.cloud.hypervisor.Hypervisor.HypervisorType;
import com.cloud.hypervisor.guru.VMwareGuru;
import com.cloud.hypervisor.vmware.manager.VmwareHostService; import com.cloud.hypervisor.vmware.manager.VmwareHostService;
import com.cloud.hypervisor.vmware.manager.VmwareManager; import com.cloud.hypervisor.vmware.manager.VmwareManager;
import com.cloud.hypervisor.vmware.manager.VmwareStorageMount; import com.cloud.hypervisor.vmware.manager.VmwareStorageMount;
@ -566,7 +565,7 @@ public class VmwareResource implements StoragePoolResource, ServerResource, Vmwa
// we need to spawn a worker VM to attach the volume to and // we need to spawn a worker VM to attach the volume to and
// resize the volume. // resize the volume.
useWorkerVm = true; useWorkerVm = true;
vmName = this.getWorkerName(getServiceContext(), cmd, 0); vmName = getWorkerName(getServiceContext(), cmd, 0);
morDS = HypervisorHostHelper.findDatastoreWithBackwardsCompatibility(hyperHost, poolId); morDS = HypervisorHostHelper.findDatastoreWithBackwardsCompatibility(hyperHost, poolId);
dsMo = new DatastoreMO(hyperHost.getContext(), morDS); dsMo = new DatastoreMO(hyperHost.getContext(), morDS);
@ -4803,10 +4802,8 @@ public class VmwareResource implements StoragePoolResource, ServerResource, Vmwa
// VM patching/rebooting time that may need // VM patching/rebooting time that may need
int retry = _retry; int retry = _retry;
while (System.currentTimeMillis() - startTick <= _opsTimeout || --retry > 0) { while (System.currentTimeMillis() - startTick <= _opsTimeout || --retry > 0) {
SocketChannel sch = null; s_logger.info("Trying to connect to " + ipAddress);
try { try (SocketChannel sch = SocketChannel.open();) {
s_logger.info("Trying to connect to " + ipAddress);
sch = SocketChannel.open();
sch.configureBlocking(true); sch.configureBlocking(true);
sch.socket().setSoTimeout(5000); sch.socket().setSoTimeout(5000);
@ -4825,13 +4822,6 @@ public class VmwareResource implements StoragePoolResource, ServerResource, Vmwa
s_logger.debug("[ignored] interupted while waiting to retry connect after failure.", e); s_logger.debug("[ignored] interupted while waiting to retry connect after failure.", e);
} }
} }
} finally {
if (sch != null) {
try {
sch.close();
} catch (IOException e) {
}
}
} }
try { try {

View File

@ -17,6 +17,8 @@
package org.apache.cloudstack.network.contrail.management; package org.apache.cloudstack.network.contrail.management;
import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
import java.io.File; import java.io.File;
import java.io.FileInputStream; import java.io.FileInputStream;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
@ -38,7 +40,6 @@ import com.cloud.network.dao.NetworkVO;
import com.cloud.offering.NetworkOffering; import com.cloud.offering.NetworkOffering;
import com.cloud.user.Account; import com.cloud.user.Account;
import com.cloud.utils.PropertiesUtil; import com.cloud.utils.PropertiesUtil;
/** /**
* ManagementNetworkGuru * ManagementNetworkGuru
* *
@ -81,10 +82,7 @@ public class ManagementNetworkGuru extends ContrailGuru {
s_logger.error(e.getMessage()); s_logger.error(e.getMessage());
throw new ConfigurationException(e.getMessage()); throw new ConfigurationException(e.getMessage());
} finally { } finally {
try { closeAutoCloseable(inputFile, "error closing config file");
inputFile.close();
} catch (IOException e) {
}
} }
_mgmtCidr = configProps.getProperty("management.cidr"); _mgmtCidr = configProps.getProperty("management.cidr");
_mgmtGateway = configProps.getProperty("management.gateway"); _mgmtGateway = configProps.getProperty("management.gateway");

View File

@ -22,6 +22,8 @@ import java.io.Serializable;
import java.lang.ref.WeakReference; import java.lang.ref.WeakReference;
import java.util.TreeSet; import java.util.TreeSet;
import org.apache.log4j.Logger;
import com.cloud.exception.InternalErrorException; import com.cloud.exception.InternalErrorException;
/** /**
@ -43,6 +45,7 @@ public interface ModelObject {
public static class ModelReference implements Comparable<ModelReference>, Serializable { public static class ModelReference implements Comparable<ModelReference>, Serializable {
private static final long serialVersionUID = -2019113974956703526L; private static final long serialVersionUID = -2019113974956703526L;
private static final Logger s_logger = Logger.getLogger(ModelReference.class);
/* /*
* WeakReference class is not serializable by definition. So, we cannot enforce its serialization unless we write the implementation of * WeakReference class is not serializable by definition. So, we cannot enforce its serialization unless we write the implementation of
@ -86,8 +89,9 @@ public interface ModelObject {
ModelReference rhs = (ModelReference)other; ModelReference rhs = (ModelReference)other;
return compareTo(rhs) == 0; return compareTo(rhs) == 0;
} catch (ClassCastException ex) { } catch (ClassCastException ex) {
// not this class , so
return false;
} }
return false;
} }
public ModelObject get() { public ModelObject get() {

View File

@ -29,6 +29,8 @@ import java.util.UUID;
import javax.inject.Inject; import javax.inject.Inject;
import org.apache.log4j.Logger;
import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope; import org.apache.cloudstack.engine.subsystem.api.storage.ClusterScope;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStore; import org.apache.cloudstack.engine.subsystem.api.storage.DataStore;
import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager; import org.apache.cloudstack.engine.subsystem.api.storage.DataStoreManager;
@ -40,7 +42,6 @@ import org.apache.cloudstack.engine.subsystem.api.storage.ZoneScope;
import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao; import org.apache.cloudstack.storage.datastore.db.PrimaryDataStoreDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO; import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;
import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper; import org.apache.cloudstack.storage.volume.datastore.PrimaryDataStoreHelper;
import org.apache.log4j.Logger;
import com.cloud.agent.AgentManager; import com.cloud.agent.AgentManager;
import com.cloud.agent.api.Answer; import com.cloud.agent.api.Answer;
@ -189,6 +190,7 @@ public class CloudStackPrimaryDataStoreLifeCycleImpl implements PrimaryDataStore
try { try {
hostPath = URLDecoder.decode(uri.getPath(), "UTF-8"); hostPath = URLDecoder.decode(uri.getPath(), "UTF-8");
} catch (UnsupportedEncodingException e) { } catch (UnsupportedEncodingException e) {
s_logger.error("[ignored] we are on a platform not supporting \"UTF-8\"!?!", e);
} }
if (hostPath == null) { // if decoding fails, use getPath() anyway if (hostPath == null) { // if decoding fails, use getPath() anyway
hostPath = uri.getPath(); hostPath = uri.getPath();

View File

@ -94,6 +94,7 @@ public class SAML2LogoutAPIAuthenticatorCmd extends BaseCmd implements APIAuthen
try { try {
resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value()); resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value());
} catch (IOException ignored) { } catch (IOException ignored) {
s_logger.info("[ignored] sending redirected failed.", ignored);
} }
return responseString; return responseString;
} }
@ -123,6 +124,7 @@ public class SAML2LogoutAPIAuthenticatorCmd extends BaseCmd implements APIAuthen
try { try {
resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value()); resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value());
} catch (IOException ignored) { } catch (IOException ignored) {
s_logger.info("[ignored] second redirected sending failed.", ignored);
} }
return responseString; return responseString;
} }
@ -134,6 +136,7 @@ public class SAML2LogoutAPIAuthenticatorCmd extends BaseCmd implements APIAuthen
try { try {
resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value()); resp.sendRedirect(SAML2AuthManager.SAMLCloudStackRedirectionUrl.value());
} catch (IOException ignored) { } catch (IOException ignored) {
s_logger.info("[ignored] final redirected failed.", ignored);
} }
return responseString; return responseString;
} }

View File

@ -288,18 +288,21 @@ public class SAML2AuthManagerImpl extends AdapterBase implements SAML2AuthManage
try { try {
idpMetadata.setSigningCertificate(KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0)); idpMetadata.setSigningCertificate(KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0));
} catch (CertificateException ignored) { } catch (CertificateException ignored) {
s_logger.info("[ignored] encountered invalid certificate signing.", ignored);
} }
} }
if (kd.getUse() == UsageType.ENCRYPTION) { if (kd.getUse() == UsageType.ENCRYPTION) {
try { try {
idpMetadata.setEncryptionCertificate(KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0)); idpMetadata.setEncryptionCertificate(KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0));
} catch (CertificateException ignored) { } catch (CertificateException ignored) {
s_logger.info("[ignored] encountered invalid certificate encryption.", ignored);
} }
} }
if (kd.getUse() == UsageType.UNSPECIFIED) { if (kd.getUse() == UsageType.UNSPECIFIED) {
try { try {
unspecifiedKey = KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0); unspecifiedKey = KeyInfoHelper.getCertificates(kd.getKeyInfo()).get(0);
} catch (CertificateException ignored) { } catch (CertificateException ignored) {
s_logger.info("[ignored] encountered invalid certificate.", ignored);
} }
} }
} }

View File

@ -19,12 +19,15 @@
package org.apache.cloudstack.api.command; package org.apache.cloudstack.api.command;
import static org.junit.Assert.assertFalse;
import com.cloud.domain.Domain; import com.cloud.domain.Domain;
import com.cloud.user.AccountService; import com.cloud.user.AccountService;
import com.cloud.user.DomainManager; import com.cloud.user.DomainManager;
import com.cloud.user.UserAccountVO; import com.cloud.user.UserAccountVO;
import com.cloud.user.dao.UserAccountDao; import com.cloud.user.dao.UserAccountDao;
import com.cloud.utils.HttpUtils; import com.cloud.utils.HttpUtils;
import org.apache.cloudstack.api.ApiServerService; import org.apache.cloudstack.api.ApiServerService;
import org.apache.cloudstack.api.BaseCmd; import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.ServerApiException; import org.apache.cloudstack.api.ServerApiException;
@ -64,6 +67,7 @@ import org.opensaml.saml2.core.impl.SubjectBuilder;
import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse; import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession; import javax.servlet.http.HttpSession;
import java.lang.reflect.Field; import java.lang.reflect.Field;
import java.security.KeyPair; import java.security.KeyPair;
import java.security.cert.X509Certificate; import java.security.cert.X509Certificate;
@ -154,8 +158,6 @@ public class SAML2LoginAPIAuthenticatorCmdTest {
userAccountDaoField.setAccessible(true); userAccountDaoField.setAccessible(true);
userAccountDaoField.set(cmd, userAccountDao); userAccountDaoField.set(cmd, userAccountDao);
String spId = "someSPID";
String url = "someUrl";
KeyPair kp = SAMLUtils.generateRandomKeyPair(); KeyPair kp = SAMLUtils.generateRandomKeyPair();
X509Certificate cert = SAMLUtils.generateRandomX509Certificate(kp); X509Certificate cert = SAMLUtils.generateRandomX509Certificate(kp);
@ -187,10 +189,13 @@ public class SAML2LoginAPIAuthenticatorCmdTest {
// SSO SAMLResponse verification test, this should throw ServerApiException for auth failure // SSO SAMLResponse verification test, this should throw ServerApiException for auth failure
params.put(SAMLPluginConstants.SAML_RESPONSE, new String[]{"Some String"}); params.put(SAMLPluginConstants.SAML_RESPONSE, new String[]{"Some String"});
Mockito.stub(cmd.processSAMLResponse(Mockito.anyString())).toReturn(buildMockResponse()); Mockito.stub(cmd.processSAMLResponse(Mockito.anyString())).toReturn(buildMockResponse());
boolean failing = true;
try { try {
cmd.authenticate("command", params, session, InetAddress.getByName("127.0.0.1"), HttpUtils.RESPONSE_TYPE_JSON, new StringBuilder(), req, resp); cmd.authenticate("command", params, session, InetAddress.getByName("127.0.0.1"), HttpUtils.RESPONSE_TYPE_JSON, new StringBuilder(), req, resp);
} catch (ServerApiException ignored) { } catch (ServerApiException ignored) {
failing = false;
} }
assertFalse("authentication should not have succeeded", failing);
Mockito.verify(userAccountDao, Mockito.times(0)).getUserAccount(Mockito.anyString(), Mockito.anyLong()); Mockito.verify(userAccountDao, Mockito.times(0)).getUserAccount(Mockito.anyString(), Mockito.anyLong());
Mockito.verify(apiServer, Mockito.times(0)).verifyUser(Mockito.anyLong()); Mockito.verify(apiServer, Mockito.times(0)).verifyUser(Mockito.anyLong());
} }

View File

@ -16,6 +16,8 @@
// under the License. // under the License.
package com.cloud.network; package com.cloud.network;
import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import java.io.InputStreamReader; import java.io.InputStreamReader;
@ -81,12 +83,7 @@ public class ExternalIpAddressAllocator extends AdapterBase implements IpAddrAll
} catch (IOException e) { } catch (IOException e) {
return new IpAddr(); return new IpAddr();
} finally { } finally {
if (in != null) { closeAutoCloseable(in, "closing buffered reader");
try {
in.close();
} catch (IOException e) {
}
}
} }
} }
@ -121,12 +118,7 @@ public class ExternalIpAddressAllocator extends AdapterBase implements IpAddrAll
} catch (IOException e) { } catch (IOException e) {
return false; return false;
} finally { } finally {
if (in != null) { closeAutoCloseable(in, "buffered reader close");
try {
in.close();
} catch (IOException e) {
}
}
} }
} }

View File

@ -693,6 +693,7 @@ public class LoadBalancingRulesManagerImpl<Type> extends ManagerBase implements
if (backupState.equals(FirewallRule.State.Active)) if (backupState.equals(FirewallRule.State.Active))
applyLoadBalancerConfig(cmd.getLbRuleId()); applyLoadBalancerConfig(cmd.getLbRuleId());
} catch (ResourceUnavailableException e1) { } catch (ResourceUnavailableException e1) {
s_logger.info("[ignored] applying load balancer config.", e1);
} finally { } finally {
loadBalancer.setState(backupState); loadBalancer.setState(backupState);
_lbDao.persist(loadBalancer); _lbDao.persist(loadBalancer);

View File

@ -43,7 +43,6 @@ import javax.crypto.SecretKey;
import javax.inject.Inject; import javax.inject.Inject;
import javax.naming.ConfigurationException; import javax.naming.ConfigurationException;
import com.cloud.utils.nio.Link;
import org.apache.commons.codec.binary.Base64; import org.apache.commons.codec.binary.Base64;
import org.apache.commons.io.FileUtils; import org.apache.commons.io.FileUtils;
import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.StringUtils;
@ -118,6 +117,7 @@ import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.db.TransactionStatus; import com.cloud.utils.db.TransactionStatus;
import com.cloud.utils.exception.CloudRuntimeException; import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils; import com.cloud.utils.net.NetUtils;
import com.cloud.utils.nio.Link;
import com.cloud.utils.script.Script; import com.cloud.utils.script.Script;
public class ConfigurationServerImpl extends ManagerBase implements ConfigurationServer { public class ConfigurationServerImpl extends ManagerBase implements ConfigurationServer {
@ -757,6 +757,7 @@ public class ConfigurationServerImpl extends ManagerBase implements Configuratio
try (DataInputStream dis = new DataInputStream(new FileInputStream(privkeyfile))) { try (DataInputStream dis = new DataInputStream(new FileInputStream(privkeyfile))) {
dis.readFully(arr1); dis.readFully(arr1);
} catch (EOFException e) { } catch (EOFException e) {
s_logger.info("[ignored] eof reached");
} catch (Exception e) { } catch (Exception e) {
s_logger.error("Cannot read the private key file", e); s_logger.error("Cannot read the private key file", e);
throw new CloudRuntimeException("Cannot read the private key file"); throw new CloudRuntimeException("Cannot read the private key file");
@ -766,6 +767,7 @@ public class ConfigurationServerImpl extends ManagerBase implements Configuratio
try (DataInputStream dis = new DataInputStream(new FileInputStream(pubkeyfile))) { try (DataInputStream dis = new DataInputStream(new FileInputStream(pubkeyfile))) {
dis.readFully(arr2); dis.readFully(arr2);
} catch (EOFException e) { } catch (EOFException e) {
s_logger.info("[ignored] eof reached");
} catch (Exception e) { } catch (Exception e) {
s_logger.warn("Cannot read the public key file", e); s_logger.warn("Cannot read the public key file", e);
throw new CloudRuntimeException("Cannot read the public key file"); throw new CloudRuntimeException("Cannot read the public key file");
@ -902,7 +904,7 @@ public class ConfigurationServerImpl extends ManagerBase implements Configuratio
} else { } else {
command = new Script("/bin/bash", s_logger); command = new Script("/bin/bash", s_logger);
} }
if (this.isOnWindows()) { if (isOnWindows()) {
scriptPath = scriptPath.replaceAll("\\\\" ,"/" ); scriptPath = scriptPath.replaceAll("\\\\" ,"/" );
systemVmIsoPath = systemVmIsoPath.replaceAll("\\\\" ,"/" ); systemVmIsoPath = systemVmIsoPath.replaceAll("\\\\" ,"/" );
publicKeyPath = publicKeyPath.replaceAll("\\\\" ,"/" ); publicKeyPath = publicKeyPath.replaceAll("\\\\" ,"/" );

View File

@ -228,12 +228,14 @@ public class ConsoleProxyServlet extends HttpServlet {
try { try {
w = Integer.parseInt(value); w = Integer.parseInt(value);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
s_logger.info("[ignored] not a number: " + value);
} }
value = req.getParameter("h"); value = req.getParameter("h");
try { try {
h = Integer.parseInt(value); h = Integer.parseInt(value);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
s_logger.info("[ignored] not a number: " + value);
} }
try { try {

View File

@ -41,6 +41,7 @@ import com.cloud.utils.component.ComponentContext;
public class VpcApiUnitTest extends TestCase { public class VpcApiUnitTest extends TestCase {
@Inject @Inject
VpcManagerImpl _vpcService = null; VpcManagerImpl _vpcService = null;
VpcVO _vo = new VpcVO(1, "new vpc", "new vpc", 1, 1, 1, "0.0.0.0/0", "vpc domain", false, false, false);
@Override @Override
@Before @Before
@ -81,93 +82,68 @@ public class VpcApiUnitTest extends TestCase {
} }
} }
//1) correct network offering
@Test @Test
public void validateNtwkOffForVpc() { public void validateNtwkOffForVpc() {
//validate network offering //validate network offering
//1) correct network offering
VpcVO vo = new VpcVO(1, "new vpc", "new vpc", 1, 1, 1, "0.0.0.0/0", "vpc domain", false, false, false);
boolean result = false; boolean result = false;
try { try {
_vpcService.validateNtwkOffForNtwkInVpc(2L, 1, "0.0.0.0", "111-", vo, "10.1.1.1", new AccountVO(), null); _vpcService.validateNtwkOffForNtwkInVpc(2L, 1, "0.0.0.0", "111-", _vo, "10.1.1.1", new AccountVO(), null);
result = true; result = true;
} catch (Exception ex) {
} finally { } finally {
assertTrue("Validate network offering: Test passed: the offering is valid for vpc creation", result); assertTrue("Validate network offering: Test passed: the offering is valid for vpc creation", result);
} }
//2) invalid offering - source nat is not included }
result = false;
//2) invalid offering - source nat is not included
@Test(expected=InvalidParameterValueException.class)
public void validateNtwkOffForVpcInvalidMissingSourceNat() {
boolean result = false;
try { try {
_vpcService.validateNtwkOffForNtwkInVpc(2L, 2, "0.0.0.0", "111-", vo, "10.1.1.1", new AccountVO(), null); _vpcService.validateNtwkOffForNtwkInVpc(2L, 2, "0.0.0.0", "111-", _vo, "10.1.1.1", new AccountVO(), null);
result = true; result = true;
} catch (InvalidParameterValueException ex) {
} finally { } finally {
assertFalse("Validate network offering: TEST FAILED, can't use network offering without SourceNat service", result); assertFalse("Validate network offering: TEST FAILED, can't use network offering without SourceNat service", result);
} }
//3) invalid offering - conserve mode is off }
result = false;
//3) invalid offering - conserve mode is off
@Test(expected=InvalidParameterValueException.class)
public void validateNtwkOffForVpcInvalidNoConserveMode() {
boolean result = false;
try { try {
_vpcService.validateNtwkOffForNtwkInVpc(2L, 3, "0.0.0.0", "111-", vo, "10.1.1.1", new AccountVO(), null); _vpcService.validateNtwkOffForNtwkInVpc(2L, 3, "0.0.0.0", "111-", _vo, "10.1.1.1", new AccountVO(), null);
result = true; result = true;
} catch (InvalidParameterValueException ex) {
} finally { } finally {
assertFalse("Validate network offering: TEST FAILED, can't use network offering without conserve mode = true", result); assertFalse("Validate network offering: TEST FAILED, can't use network offering without conserve mode = true", result);
} }
//4) invalid offering - guest type shared }
result = false;
//4) invalid offering - guest type shared
@Test(expected=InvalidParameterValueException.class)
public void validateNtwkOffForVpcInvalidTypeIsGuest() {
boolean result = false;
try { try {
_vpcService.validateNtwkOffForNtwkInVpc(2L, 4, "0.0.0.0", "111-", vo, "10.1.1.1", new AccountVO(), null); _vpcService.validateNtwkOffForNtwkInVpc(2L, 4, "0.0.0.0", "111-", _vo, "10.1.1.1", new AccountVO(), null);
result = true; result = true;
} catch (InvalidParameterValueException ex) {
} finally { } finally {
assertFalse("Validate network offering: TEST FAILED, can't use network offering with guest type = Shared", result); assertFalse("Validate network offering: TEST FAILED, can't use network offering with guest type = Shared", result);
} }
//5) Invalid offering - no redundant router support }
result = false;
//5) Invalid offering - no redundant router support
@Test(expected=InvalidParameterValueException.class)
public void validateNtwkOffForVpcInvalidNoRVRSupport() {
boolean result = false;
try { try {
_vpcService.validateNtwkOffForNtwkInVpc(2L, 5, "0.0.0.0", "111-", vo, "10.1.1.1", new AccountVO(), null); _vpcService.validateNtwkOffForNtwkInVpc(2L, 5, "0.0.0.0", "111-", _vo, "10.1.1.1", new AccountVO(), null);
result = true; result = true;
} catch (InvalidParameterValueException ex) {
} finally { } finally {
assertFalse("TEST FAILED, can't use network offering with guest type = Shared", result); assertFalse("TEST FAILED, can't use network offering with guest type = Shared", result);
} }
} }
// public void destroyVpc() {
// boolean result = false;
// try {
// result = _vpcService.destroyVpc(vo, new AccountVO(), 1L);
// } catch (Exception ex) {
// s_logger.debug(ex);
// } finally {
// assertTrue("Failed to destroy VPC", result);
// }
// }
//
// public void deleteVpc() {
// //delete existing offering
// boolean result = false;
// try {
// List<String> svcs = new ArrayList<String>();
// svcs.add(Service.SourceNat.getName());
// result = _vpcService.deleteVpc(1);
// } catch (Exception ex) {
// } finally {
// assertTrue("Delete vpc: TEST FAILED, vpc failed to delete" + result, result);
// }
//
// //delete non-existing offering
// result = false;
// try {
// List<String> svcs = new ArrayList<String>();
// svcs.add(Service.SourceNat.getName());
// result = _vpcService.deleteVpc(100);
// } catch (Exception ex) {
// } finally {
// assertFalse("Delete vpc: TEST FAILED, true is returned when try to delete non existing vpc" + result, result);
// }
// }
} }

View File

@ -125,15 +125,12 @@ public class CreateNetworkOfferingTest extends TestCase {
assertNotNull("Shared network offering with specifyVlan=true failed to create ", off); assertNotNull("Shared network offering with specifyVlan=true failed to create ", off);
} }
@Test @Test(expected=InvalidParameterValueException.class)
public void createSharedNtwkOffWithNoVlan() { public void createSharedNtwkOffWithNoVlan() {
try { NetworkOfferingVO off =
NetworkOfferingVO off =
configMgr.createNetworkOffering("shared", "shared", TrafficType.Guest, null, false, Availability.Optional, 200, null, false, Network.GuestType.Shared, configMgr.createNetworkOffering("shared", "shared", TrafficType.Guest, null, false, Availability.Optional, 200, null, false, Network.GuestType.Shared,
false, null, false, null, true, false, null, false, null, true); false, null, false, null, true, false, null, false, null, true);
assertNull("Shared network offering with specifyVlan=false was created", off); assertNull("Shared network offering with specifyVlan=false was created", off);
} catch (InvalidParameterValueException ex) {
}
} }
@Test @Test
@ -145,15 +142,12 @@ public class CreateNetworkOfferingTest extends TestCase {
assertNotNull("Shared network offering with specifyIpRanges=true failed to create ", off); assertNotNull("Shared network offering with specifyIpRanges=true failed to create ", off);
} }
@Test @Test(expected=InvalidParameterValueException.class)
public void createSharedNtwkOffWithoutSpecifyIpRanges() { public void createSharedNtwkOffWithoutSpecifyIpRanges() {
try { NetworkOfferingVO off =
NetworkOfferingVO off =
configMgr.createNetworkOffering("shared", "shared", TrafficType.Guest, null, true, Availability.Optional, 200, null, false, Network.GuestType.Shared, configMgr.createNetworkOffering("shared", "shared", TrafficType.Guest, null, true, Availability.Optional, 200, null, false, Network.GuestType.Shared,
false, null, false, null, false, false, null, false, null, true); false, null, false, null, false, false, null, false, null, true);
assertNull("Shared network offering with specifyIpRanges=false was created", off); assertNull("Shared network offering with specifyIpRanges=false was created", off);
} catch (InvalidParameterValueException ex) {
}
} }
//Test Isolated network offerings //Test Isolated network offerings
@ -183,19 +177,16 @@ public class CreateNetworkOfferingTest extends TestCase {
} }
@Test @Test(expected=InvalidParameterValueException.class)
public void createIsolatedNtwkOffWithSpecifyIpRangesAndSourceNat() { public void createIsolatedNtwkOffWithSpecifyIpRangesAndSourceNat() {
try { Map<Service, Set<Provider>> serviceProviderMap = new HashMap<Network.Service, Set<Network.Provider>>();
Map<Service, Set<Provider>> serviceProviderMap = new HashMap<Network.Service, Set<Network.Provider>>(); Set<Network.Provider> vrProvider = new HashSet<Network.Provider>();
Set<Network.Provider> vrProvider = new HashSet<Network.Provider>(); vrProvider.add(Provider.VirtualRouter);
vrProvider.add(Provider.VirtualRouter); serviceProviderMap.put(Network.Service.SourceNat, vrProvider);
serviceProviderMap.put(Network.Service.SourceNat, vrProvider); NetworkOfferingVO off =
NetworkOfferingVO off =
configMgr.createNetworkOffering("isolated", "isolated", TrafficType.Guest, null, false, Availability.Optional, 200, serviceProviderMap, false, configMgr.createNetworkOffering("isolated", "isolated", TrafficType.Guest, null, false, Availability.Optional, 200, serviceProviderMap, false,
Network.GuestType.Isolated, false, null, false, null, true, false, null, false, null, true); Network.GuestType.Isolated, false, null, false, null, true, false, null, false, null, true);
assertNull("Isolated network offering with specifyIpRanges=true and source nat service enabled, was created", off); assertNull("Isolated network offering with specifyIpRanges=true and source nat service enabled, was created", off);
} catch (InvalidParameterValueException ex) {
}
} }
@Test @Test

View File

@ -21,12 +21,15 @@ import java.awt.image.DataBuffer;
import java.awt.image.DataBufferInt; import java.awt.image.DataBufferInt;
import java.util.Arrays; import java.util.Arrays;
import org.apache.log4j.Logger;
import streamer.BaseElement; import streamer.BaseElement;
import streamer.ByteBuffer; import streamer.ByteBuffer;
import streamer.Element; import streamer.Element;
import streamer.Link; import streamer.Link;
public class BufferedImagePixelsAdapter extends BaseElement { public class BufferedImagePixelsAdapter extends BaseElement {
private static final Logger s_logger = Logger.getLogger(BufferedImagePixelsAdapter.class);
public static final String TARGET_X = "x"; public static final String TARGET_X = "x";
public static final String TARGET_Y = "y"; public static final String TARGET_Y = "y";
@ -55,7 +58,7 @@ public class BufferedImagePixelsAdapter extends BaseElement {
@Override @Override
public void handleData(ByteBuffer buf, Link link) { public void handleData(ByteBuffer buf, Link link) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Data received: " + buf + "."); s_logger.debug("[" + this + "] INFO: Data received: " + buf + ".");
int x = (Integer)buf.getMetadata(TARGET_X); int x = (Integer)buf.getMetadata(TARGET_X);
int y = (Integer)buf.getMetadata(TARGET_Y); int y = (Integer)buf.getMetadata(TARGET_Y);
@ -100,6 +103,7 @@ public class BufferedImagePixelsAdapter extends BaseElement {
try { try {
System.arraycopy(intArray, srcLine * rectWidth, imageBuffer, x + dstLine * imageWidth, rectWidth); System.arraycopy(intArray, srcLine * rectWidth, imageBuffer, x + dstLine * imageWidth, rectWidth);
} catch (IndexOutOfBoundsException e) { } catch (IndexOutOfBoundsException e) {
s_logger.info("[ignored] copy error",e);
} }
} }
break; break;
@ -141,7 +145,7 @@ public class BufferedImagePixelsAdapter extends BaseElement {
String actualData = Arrays.toString(((DataBufferInt)canvas.getOfflineImage().getRaster().getDataBuffer()).getData()); String actualData = Arrays.toString(((DataBufferInt)canvas.getOfflineImage().getRaster().getDataBuffer()).getData());
String expectedData = Arrays.toString(pixelsLE); String expectedData = Arrays.toString(pixelsLE);
if (!actualData.equals(expectedData)) if (!actualData.equals(expectedData))
System.err.println("Actual image: " + actualData + "\nExpected image: " + expectedData + "."); s_logger.error("Actual image: " + actualData + "\nExpected image: " + expectedData + ".");
} }

View File

@ -16,11 +16,14 @@
// under the License. // under the License.
package streamer; package streamer;
import org.apache.log4j.Logger;
/** /**
* Link to transfer data in bounds of single thread (synchronized transfer). * Link to transfer data in bounds of single thread (synchronized transfer).
* Must not be used to send data to elements served in different threads. * Must not be used to send data to elements served in different threads.
*/ */
public class SyncLink implements Link { public class SyncLink implements Link {
private static final Logger s_logger = Logger.getLogger(SyncLink.class);
/** /**
* When null packet is pulled from source element, then make slight delay to * When null packet is pulled from source element, then make slight delay to
@ -112,7 +115,7 @@ public class SyncLink implements Link {
@Override @Override
public void pushBack(ByteBuffer buf) { public void pushBack(ByteBuffer buf) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Buffer pushed back: " + buf + "."); s_logger.debug("[" + this + "] INFO: Buffer pushed back: " + buf + ".");
if (cacheBuffer != null) { if (cacheBuffer != null) {
ByteBuffer tmp = cacheBuffer.join(buf); ByteBuffer tmp = cacheBuffer.join(buf);
@ -151,7 +154,7 @@ public class SyncLink implements Link {
throw new RuntimeException("[" + this + "] ERROR: link is not in push mode."); throw new RuntimeException("[" + this + "] ERROR: link is not in push mode.");
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Incoming buffer: " + buf + "."); s_logger.debug("[" + this + "] INFO: Incoming buffer: " + buf + ".");
if (buf == null && cacheBuffer == null) if (buf == null && cacheBuffer == null)
return; return;
@ -172,7 +175,7 @@ public class SyncLink implements Link {
while (cacheBuffer != null) { while (cacheBuffer != null) {
if (paused || hold) { if (paused || hold) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Transfer is paused. Data in cache buffer: " + cacheBuffer + "."); s_logger.debug("[" + this + "] INFO: Transfer is paused. Data in cache buffer: " + cacheBuffer + ".");
// Wait until rest of packet will be read // Wait until rest of packet will be read
return; return;
@ -180,7 +183,7 @@ public class SyncLink implements Link {
if (expectedPacketSize > 0 && cacheBuffer.length < expectedPacketSize) { if (expectedPacketSize > 0 && cacheBuffer.length < expectedPacketSize) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Transfer is suspended because available data is less than expected packet size. Expected packet size: " s_logger.debug("[" + this + "] INFO: Transfer is suspended because available data is less than expected packet size. Expected packet size: "
+ expectedPacketSize + ", data in cache buffer: " + cacheBuffer + "."); + expectedPacketSize + ", data in cache buffer: " + cacheBuffer + ".");
// Wait until rest of packet will be read // Wait until rest of packet will be read
@ -207,7 +210,7 @@ public class SyncLink implements Link {
public void sendEvent(Event event, Direction direction) { public void sendEvent(Event event, Direction direction) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Event " + event + " is received."); s_logger.debug("[" + this + "] INFO: Event " + event + " is received.");
// Shutdown main loop (if any) when STREAM_CLOSE event is received. // Shutdown main loop (if any) when STREAM_CLOSE event is received.
switch (event) { switch (event) {
@ -254,13 +257,14 @@ public class SyncLink implements Link {
if (paused) { if (paused) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Cannot pull, link is paused."); s_logger.debug("[" + this + "] INFO: Cannot pull, link is paused.");
// Make slight delay in such case, to avoid consuming 100% of CPU // Make slight delay in such case, to avoid consuming 100% of CPU
if (block) { if (block) {
try { try {
Thread.sleep(100); Thread.sleep(100);
} catch (InterruptedException e) { } catch (InterruptedException e) {
s_logger.info("[ignored] interupted during pull", e);
} }
} }
@ -271,7 +275,7 @@ public class SyncLink implements Link {
// then return it instead of asking for more data from source // then return it instead of asking for more data from source
if (cacheBuffer != null && (expectedPacketSize == 0 || (expectedPacketSize > 0 && cacheBuffer.length >= expectedPacketSize))) { if (cacheBuffer != null && (expectedPacketSize == 0 || (expectedPacketSize > 0 && cacheBuffer.length >= expectedPacketSize))) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Data pulled from cache buffer: " + cacheBuffer + "."); s_logger.debug("[" + this + "] INFO: Data pulled from cache buffer: " + cacheBuffer + ".");
ByteBuffer tmp = cacheBuffer; ByteBuffer tmp = cacheBuffer;
cacheBuffer = null; cacheBuffer = null;
@ -290,7 +294,7 @@ public class SyncLink implements Link {
// Can return something only when data was stored in buffer // Can return something only when data was stored in buffer
if (cacheBuffer != null && (expectedPacketSize == 0 || (expectedPacketSize > 0 && cacheBuffer.length >= expectedPacketSize))) { if (cacheBuffer != null && (expectedPacketSize == 0 || (expectedPacketSize > 0 && cacheBuffer.length >= expectedPacketSize))) {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Data pulled from source: " + cacheBuffer + "."); s_logger.debug("[" + this + "] INFO: Data pulled from source: " + cacheBuffer + ".");
ByteBuffer tmp = cacheBuffer; ByteBuffer tmp = cacheBuffer;
cacheBuffer = null; cacheBuffer = null;
@ -366,7 +370,7 @@ public class SyncLink implements Link {
sendEvent(Event.LINK_SWITCH_TO_PULL_MODE, Direction.IN); sendEvent(Event.LINK_SWITCH_TO_PULL_MODE, Direction.IN);
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Starting pull loop."); s_logger.debug("[" + this + "] INFO: Starting pull loop.");
// Pull source in loop // Pull source in loop
while (!shutdown) { while (!shutdown) {
@ -382,7 +386,7 @@ public class SyncLink implements Link {
} }
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Pull loop finished."); s_logger.debug("[" + this + "] INFO: Pull loop finished.");
} }
@ -397,7 +401,7 @@ public class SyncLink implements Link {
@Override @Override
public void setPullMode() { public void setPullMode() {
if (verbose) if (verbose)
System.out.println("[" + this + "] INFO: Switching to PULL mode."); s_logger.debug("[" + this + "] INFO: Switching to PULL mode.");
pullMode = true; pullMode = true;
} }

View File

@ -16,6 +16,8 @@
// under the License. // under the License.
package streamer.debug; package streamer.debug;
import org.apache.log4j.Logger;
import streamer.BaseElement; import streamer.BaseElement;
import streamer.ByteBuffer; import streamer.ByteBuffer;
import streamer.Direction; import streamer.Direction;
@ -25,6 +27,7 @@ import streamer.Link;
import streamer.SyncLink; import streamer.SyncLink;
public class FakeSource extends BaseElement { public class FakeSource extends BaseElement {
private static final Logger s_logger = Logger.getLogger(FakeSource.class);
/** /**
* Delay for null packets in poll method when blocking is requested, in * Delay for null packets in poll method when blocking is requested, in
@ -66,6 +69,7 @@ public class FakeSource extends BaseElement {
try { try {
Thread.sleep(delay); Thread.sleep(delay);
} catch (InterruptedException e) { } catch (InterruptedException e) {
s_logger.info("[ignored] interupted while creating latency", e);
} }
} }

View File

@ -16,6 +16,8 @@
// under the License. // under the License.
package com.cloud.consoleproxy; package com.cloud.consoleproxy;
import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
@ -201,10 +203,7 @@ public class ConsoleProxyAjaxHandler implements HttpHandler {
s_logger.warn("Exception while reading request body: ", e); s_logger.warn("Exception while reading request body: ", e);
} finally { } finally {
if (closeStreamAfterRead) { if (closeStreamAfterRead) {
try { closeAutoCloseable(is, "error closing stream after read");
is.close();
} catch (IOException e) {
}
} }
} }
return sb.toString(); return sb.toString();

View File

@ -25,6 +25,7 @@ import java.util.List;
import com.cloud.consoleproxy.ConsoleProxyRdpClient; import com.cloud.consoleproxy.ConsoleProxyRdpClient;
import com.cloud.consoleproxy.util.ImageHelper; import com.cloud.consoleproxy.util.ImageHelper;
import com.cloud.consoleproxy.util.Logger;
import com.cloud.consoleproxy.util.TileInfo; import com.cloud.consoleproxy.util.TileInfo;
import com.cloud.consoleproxy.vnc.FrameBufferCanvas; import com.cloud.consoleproxy.vnc.FrameBufferCanvas;
@ -35,6 +36,7 @@ public class RdpBufferedImageCanvas extends BufferedImageCanvas implements Frame
* *
*/ */
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static final Logger s_logger = Logger.getLogger(RdpBufferedImageCanvas.class);
private final ConsoleProxyRdpClient _rdpClient; private final ConsoleProxyRdpClient _rdpClient;
@ -66,6 +68,7 @@ public class RdpBufferedImageCanvas extends BufferedImageCanvas implements Frame
try { try {
imgBits = ImageHelper.jpegFromImage(bufferedImage); imgBits = ImageHelper.jpegFromImage(bufferedImage);
} catch (IOException e) { } catch (IOException e) {
s_logger.info("[ignored] read error on image", e);
} }
return imgBits; return imgBits;
@ -91,6 +94,7 @@ public class RdpBufferedImageCanvas extends BufferedImageCanvas implements Frame
try { try {
imgBits = ImageHelper.jpegFromImage(bufferedImage); imgBits = ImageHelper.jpegFromImage(bufferedImage);
} catch (IOException e) { } catch (IOException e) {
s_logger.info("[ignored] read error on image tiles", e);
} }
return imgBits; return imgBits;
} }

View File

@ -27,6 +27,7 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import com.cloud.consoleproxy.util.ImageHelper; import com.cloud.consoleproxy.util.ImageHelper;
import com.cloud.consoleproxy.util.Logger;
import com.cloud.consoleproxy.util.TileInfo; import com.cloud.consoleproxy.util.TileInfo;
/** /**
@ -35,6 +36,7 @@ import com.cloud.consoleproxy.util.TileInfo;
*/ */
public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas { public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
private static final Logger s_logger = Logger.getLogger(BufferedImageCanvas.class);
// Offline screen buffer // Offline screen buffer
private BufferedImage offlineImage; private BufferedImage offlineImage;
@ -42,7 +44,7 @@ public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas {
// Cached Graphics2D object for offline screen buffer // Cached Graphics2D object for offline screen buffer
private Graphics2D graphics; private Graphics2D graphics;
private PaintNotificationListener listener; private final PaintNotificationListener listener;
public BufferedImageCanvas(PaintNotificationListener listener, int width, int height) { public BufferedImageCanvas(PaintNotificationListener listener, int width, int height) {
super(); super();
@ -59,7 +61,7 @@ public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas {
} }
public void setCanvasSize(int width, int height) { public void setCanvasSize(int width, int height) {
this.offlineImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); offlineImage = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB);
graphics = offlineImage.createGraphics(); graphics = offlineImage.createGraphics();
setSize(offlineImage.getWidth(), offlineImage.getHeight()); setSize(offlineImage.getWidth(), offlineImage.getHeight());
@ -121,6 +123,7 @@ public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas {
try { try {
imgBits = ImageHelper.jpegFromImage(bufferedImage); imgBits = ImageHelper.jpegFromImage(bufferedImage);
} catch (IOException e) { } catch (IOException e) {
s_logger.info("[ignored] read error on image", e);
} }
return imgBits; return imgBits;
} }
@ -144,6 +147,7 @@ public class BufferedImageCanvas extends Canvas implements FrameBufferCanvas {
try { try {
imgBits = ImageHelper.jpegFromImage(bufferedImage); imgBits = ImageHelper.jpegFromImage(bufferedImage);
} catch (IOException e) { } catch (IOException e) {
s_logger.info("[ignored] read error on image tiles", e);
} }
return imgBits; return imgBits;
} }

View File

@ -23,9 +23,11 @@ import java.awt.image.DataBufferInt;
import java.io.DataInputStream; import java.io.DataInputStream;
import java.io.IOException; import java.io.IOException;
import com.cloud.consoleproxy.util.Logger;
import com.cloud.consoleproxy.vnc.VncScreenDescription; import com.cloud.consoleproxy.vnc.VncScreenDescription;
public class RawRect extends AbstractRect { public class RawRect extends AbstractRect {
private static final Logger s_logger = Logger.getLogger(RawRect.class);
private final int[] buf; private final int[] buf;
public RawRect(VncScreenDescription screen, int x, int y, int width, int height, DataInputStream is) throws IOException { public RawRect(VncScreenDescription screen, int x, int y, int width, int height, DataInputStream is) throws IOException {
@ -50,26 +52,27 @@ public class RawRect extends AbstractRect {
switch (dataBuf.getDataType()) { switch (dataBuf.getDataType()) {
case DataBuffer.TYPE_INT: { case DataBuffer.TYPE_INT: {
// We chose RGB888 model, so Raster will use DataBufferInt type // We chose RGB888 model, so Raster will use DataBufferInt type
DataBufferInt dataBuffer = (DataBufferInt)dataBuf; DataBufferInt dataBuffer = (DataBufferInt)dataBuf;
int imageWidth = image.getWidth(); int imageWidth = image.getWidth();
int imageHeight = image.getHeight(); int imageHeight = image.getHeight();
// Paint rectangle directly on buffer, line by line // Paint rectangle directly on buffer, line by line
int[] imageBuffer = dataBuffer.getData(); int[] imageBuffer = dataBuffer.getData();
for (int srcLine = 0, dstLine = y; srcLine < height && dstLine < imageHeight; srcLine++, dstLine++) { for (int srcLine = 0, dstLine = y; srcLine < height && dstLine < imageHeight; srcLine++, dstLine++) {
try { try {
System.arraycopy(buf, srcLine * width, imageBuffer, x + dstLine * imageWidth, width); System.arraycopy(buf, srcLine * width, imageBuffer, x + dstLine * imageWidth, width);
} catch (IndexOutOfBoundsException e) { } catch (IndexOutOfBoundsException e) {
} s_logger.info("[ignored] buffer overflow!?!", e);
} }
break;
} }
break;
}
default: default:
throw new RuntimeException("Unsupported data buffer in buffered image: expected data buffer of type int (DataBufferInt). Actual data buffer type: " + throw new RuntimeException("Unsupported data buffer in buffered image: expected data buffer of type int (DataBufferInt). Actual data buffer type: " +
dataBuf.getClass().getSimpleName()); dataBuf.getClass().getSimpleName());
} }
} }

View File

@ -89,6 +89,7 @@ public class UsageServer implements Daemon {
try { try {
Log4jConfigurer.initLogging(file.getAbsolutePath()); Log4jConfigurer.initLogging(file.getAbsolutePath());
} catch (FileNotFoundException e) { } catch (FileNotFoundException e) {
s_logger.info("[ignored] log initialisation ;)" + e.getLocalizedMessage(), e);
} }
DOMConfigurator.configureAndWatch(file.getAbsolutePath()); DOMConfigurator.configureAndWatch(file.getAbsolutePath());
@ -99,6 +100,7 @@ public class UsageServer implements Daemon {
try { try {
Log4jConfigurer.initLogging(file.getAbsolutePath()); Log4jConfigurer.initLogging(file.getAbsolutePath());
} catch (FileNotFoundException e) { } catch (FileNotFoundException e) {
s_logger.info("[ignored] log properties initialization :)" + e.getLocalizedMessage(), e);
} }
PropertyConfigurator.configureAndWatch(file.getAbsolutePath()); PropertyConfigurator.configureAndWatch(file.getAbsolutePath());
} }

View File

@ -0,0 +1,36 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.utils;
import org.apache.log4j.Logger;
public class AutoCloseableUtil {
    private final static Logger s_logger = Logger.getLogger(AutoCloseableUtil.class);

    // Utility class holding only static helpers: prevent instantiation.
    private AutoCloseableUtil() {
    }

    /**
     * Best-effort close of a resource. Any exception thrown by {@code close()}
     * is logged at WARN level with the supplied context message and then
     * swallowed, so a cleanup failure never masks the caller's primary
     * error path.
     *
     * @param ac      the resource to close; {@code null} is accepted and ignored
     * @param message context text included in the warning logged when closing fails
     */
    public static void closeAutoCloseable(AutoCloseable ac, String message) {
        if (ac == null) {
            return;
        }
        try {
            ac.close();
        } catch (Exception e) {
            s_logger.warn("[ignored] " + message, e);
        }
    }
}

View File

@ -271,30 +271,4 @@ public class DateUtil {
return (dateCalendar1.getTimeInMillis() - dateCalendar2.getTimeInMillis() )/1000; return (dateCalendar1.getTimeInMillis() - dateCalendar2.getTimeInMillis() )/1000;
} }
// test only
public static void main(String[] args) {
TimeZone localTimezone = Calendar.getInstance().getTimeZone();
TimeZone gmtTimezone = TimeZone.getTimeZone("GMT");
TimeZone estTimezone = TimeZone.getTimeZone("EST");
Date time = new Date();
System.out.println("local time :" + getDateDisplayString(localTimezone, time));
System.out.println("GMT time :" + getDateDisplayString(gmtTimezone, time));
System.out.println("EST time :" + getDateDisplayString(estTimezone, time));
//Test next run time. Expects interval and schedule as arguments
if (args.length == 2) {
System.out.println("Next run time: " + getNextRunTime(IntervalType.getIntervalType(args[0]), args[1], "GMT", time).toString());
}
time = new Date();
DateFormat dfDate = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'Z");
String str = dfDate.format(time);
System.out.println("Formated TZ time string : " + str);
try {
Date dtParsed = DateUtil.parseTZDateString(str);
System.out.println("Parsed TZ time string : " + dtParsed.toString());
} catch (ParseException e) {
}
}
} }

View File

@ -19,7 +19,7 @@
package com.cloud.utils.net; package com.cloud.utils.net;
import com.cloud.utils.NumbersUtil; import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.File; import java.io.File;
@ -29,12 +29,17 @@ import java.net.InetAddress;
import java.net.UnknownHostException; import java.net.UnknownHostException;
import java.util.Formatter; import java.util.Formatter;
import org.apache.log4j.Logger;
import com.cloud.utils.NumbersUtil;
/** /**
* copied from the public domain utility from John Burkard. * copied from the public domain utility from John Burkard.
* @author <a href="mailto:jb@eaio.com">Johann Burkard</a> * @author <a href="mailto:jb@eaio.com">Johann Burkard</a>
* @version 2.1.3 * @version 2.1.3
**/ **/
public class MacAddress { public class MacAddress {
private static final Logger s_logger = Logger.getLogger(MacAddress.class);
private long _addr = 0; private long _addr = 0;
protected MacAddress() { protected MacAddress() {
@ -124,23 +129,14 @@ public class MacAddress {
} }
} catch (SecurityException ex) { } catch (SecurityException ex) {
s_logger.info("[ignored] security exception in static initializer of MacAddress", ex);
} catch (IOException ex) { } catch (IOException ex) {
s_logger.info("[ignored] io exception in static initializer of MacAddress");
} finally { } finally {
if (p != null) { if (p != null) {
if (in != null) { closeAutoCloseable(in, "closing init process input stream");
try { closeAutoCloseable(p.getErrorStream(), "closing init process error output stream");
in.close(); closeAutoCloseable(p.getOutputStream(), "closing init process std output stream");
} catch (IOException ex) {
}
}
try {
p.getErrorStream().close();
} catch (IOException ex) {
}
try {
p.getOutputStream().close();
} catch (IOException ex) {
}
p.destroy(); p.destroy();
} }
} }
@ -184,20 +180,9 @@ public class MacAddress {
return reader.readLine(); return reader.readLine();
} finally { } finally {
if (p != null) { if (p != null) {
if (reader != null) { closeAutoCloseable(reader, "closing process input stream");
try { closeAutoCloseable(p.getErrorStream(), "closing process error output stream");
reader.close(); closeAutoCloseable(p.getOutputStream(), "closing process std output stream");
} catch (IOException ex) {
}
}
try {
p.getErrorStream().close();
} catch (IOException ex) {
}
try {
p.getOutputStream().close();
} catch (IOException ex) {
}
p.destroy(); p.destroy();
} }
} }

View File

@ -19,6 +19,8 @@
package com.cloud.utils.nio; package com.cloud.utils.nio;
import static com.cloud.utils.AutoCloseableUtil.closeAutoCloseable;
import java.io.IOException; import java.io.IOException;
import java.net.ConnectException; import java.net.ConnectException;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
@ -41,10 +43,10 @@ import java.util.concurrent.TimeUnit;
import javax.net.ssl.SSLContext; import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLEngine;
import org.apache.cloudstack.utils.security.SSLUtils;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.cloudstack.utils.security.SSLUtils;
import com.cloud.utils.concurrency.NamedThreadFactory; import com.cloud.utils.concurrency.NamedThreadFactory;
/** /**
@ -208,11 +210,8 @@ public abstract class NioConnection implements Runnable {
if (s_logger.isTraceEnabled()) { if (s_logger.isTraceEnabled()) {
s_logger.trace("Socket " + socket + " closed on read. Probably -1 returned: " + e.getMessage()); s_logger.trace("Socket " + socket + " closed on read. Probably -1 returned: " + e.getMessage());
} }
try { closeAutoCloseable(socketChannel, "accepting socketChannel");
socketChannel.close(); closeAutoCloseable(socket, "opened socket");
socket.close();
} catch (IOException ignore) {
}
return; return;
} }
@ -334,6 +333,7 @@ public abstract class NioConnection implements Runnable {
try { try {
((SocketChannel)(todo.key)).close(); ((SocketChannel)(todo.key)).close();
} catch (IOException ignore) { } catch (IOException ignore) {
s_logger.info("[ignored] socket channel");
} finally { } finally {
Link link = (Link)todo.att; Link link = (Link)todo.att;
link.terminated(); link.terminated();
@ -420,6 +420,7 @@ public abstract class NioConnection implements Runnable {
channel.close(); channel.close();
} }
} catch (IOException ignore) { } catch (IOException ignore) {
s_logger.info("[ignored] channel");
} }
} }
} }

View File

@ -19,11 +19,11 @@
package com.cloud.utils.script; package com.cloud.utils.script;
import org.apache.log4j.Logger;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.IOException; import java.io.IOException;
import org.apache.log4j.Logger;
/** /**
*/ */
public abstract class OutputInterpreter { public abstract class OutputInterpreter {
@ -50,6 +50,7 @@ public abstract class OutputInterpreter {
}; };
public static class TimedOutLogger extends OutputInterpreter { public static class TimedOutLogger extends OutputInterpreter {
private static final Logger s_logger = Logger.getLogger(TimedOutLogger.class);
Process _process; Process _process;
public TimedOutLogger(Process process) { public TimedOutLogger(Process process) {
@ -76,6 +77,7 @@ public abstract class OutputInterpreter {
buff.append(reader.readLine()); buff.append(reader.readLine());
} }
} catch (IOException e) { } catch (IOException e) {
s_logger.info("[ignored] can not append line to buffer",e);
} }
return buff.toString(); return buff.toString();

View File

@ -26,7 +26,11 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.log4j.Logger;
public class Decoder { public class Decoder {
private static final Logger s_logger = Logger.getLogger(Decoder.class);
private static Map<String, String> getParameters(URI uri) { private static Map<String, String> getParameters(URI uri) {
String parameters = uri.getQuery(); String parameters = uri.getQuery();
Map<String, String> params = new HashMap<String, String>(); Map<String, String> params = new HashMap<String, String>();
@ -52,7 +56,7 @@ public class Decoder {
try { try {
size = Long.parseLong(params.get(EncodingType.SIZE.toString())); size = Long.parseLong(params.get(EncodingType.SIZE.toString()));
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
s_logger.info("[ignored] number not recognised",e);
} }
DecodedDataObject obj = DecodedDataObject obj =
new DecodedDataObject(params.get(EncodingType.OBJTYPE.toString()), size, params.get(EncodingType.NAME.toString()), params.get(EncodingType.PATH.toString()), new DecodedDataObject(params.get(EncodingType.OBJTYPE.toString()), size, params.get(EncodingType.NAME.toString()), params.get(EncodingType.PATH.toString()),

View File

@ -0,0 +1,60 @@
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
//
package com.cloud.utils;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.TimeZone;
import com.cloud.utils.DateUtil.IntervalType;
public class DateUtilTest {

    // command line test tool: prints the current time in several zones and,
    // optionally, the next scheduled run time for a given interval/schedule
    public static void main(String[] args) {
        final TimeZone localZone = Calendar.getInstance().getTimeZone();
        final TimeZone gmtZone = TimeZone.getTimeZone("GMT");
        final TimeZone estZone = TimeZone.getTimeZone("EST");

        Date now = new Date();
        System.out.println("local time :" + DateUtil.getDateDisplayString(localZone, now));
        System.out.println("GMT time :" + DateUtil.getDateDisplayString(gmtZone, now));
        System.out.println("EST time :" + DateUtil.getDateDisplayString(estZone, now));

        //Test next run time. Expects interval and schedule as arguments
        if (args.length == 2) {
            IntervalType interval = IntervalType.getIntervalType(args[0]);
            System.out.println("Next run time: " + DateUtil.getNextRunTime(interval, args[1], "GMT", now).toString());
        }

        now = new Date();
        DateFormat tzFormat = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss'Z'Z");
        String formatted = tzFormat.format(now);
        System.out.println("Formated TZ time string : " + formatted);

        // round-trip the formatted timestamp through the project parser
        try {
            Date parsed = DateUtil.parseTZDateString(formatted);
            System.out.println("Parsed TZ time string : " + parsed.toString());
        } catch (ParseException e) {
            System.err.println("Parsing failed\n string : " + formatted + "\nexception :" + e.getLocalizedMessage());
        }
    }
}

View File

@ -19,6 +19,7 @@
package com.cloud.utils.exception; package com.cloud.utils.exception;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
@ -35,6 +36,7 @@ public class ExceptionUtilTest {
ExceptionUtil.rethrow(fnfe, IOException.class); ExceptionUtil.rethrow(fnfe, IOException.class);
fail(); fail();
} catch (IOException e) { } catch (IOException e) {
assertTrue("we won !?!", true);
} }
ExceptionUtil.rethrow(fnfe, ClassNotFoundException.class); ExceptionUtil.rethrow(fnfe, ClassNotFoundException.class);
@ -43,6 +45,7 @@ public class ExceptionUtilTest {
ExceptionUtil.rethrow(fnfe, FileNotFoundException.class); ExceptionUtil.rethrow(fnfe, FileNotFoundException.class);
fail(); fail();
} catch (FileNotFoundException e) { } catch (FileNotFoundException e) {
assertTrue("we won !?!", true);
} }
} }

View File

@ -31,7 +31,7 @@ import org.apache.log4j.Logger;
public class SnapshotDescriptor { public class SnapshotDescriptor {
private static final Logger s_logger = Logger.getLogger(SnapshotDescriptor.class); private static final Logger s_logger = Logger.getLogger(SnapshotDescriptor.class);
private Properties _properties = new Properties(); private final Properties _properties = new Properties();
public SnapshotDescriptor() { public SnapshotDescriptor() {
} }
@ -90,11 +90,9 @@ public class SnapshotDescriptor {
} }
public byte[] getVmsdContent() { public byte[] getVmsdContent() {
BufferedWriter out = null;
ByteArrayOutputStream bos = new ByteArrayOutputStream(); ByteArrayOutputStream bos = new ByteArrayOutputStream();
try { try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(bos, "UTF-8"));) {
out = new BufferedWriter(new OutputStreamWriter(bos, "UTF-8"));
out.write(".encoding = \"UTF-8\""); out.write(".encoding = \"UTF-8\"");
out.newLine(); out.newLine();
@ -165,13 +163,6 @@ public class SnapshotDescriptor {
} catch (IOException e) { } catch (IOException e) {
assert (false); assert (false);
s_logger.error("Unexpected exception ", e); s_logger.error("Unexpected exception ", e);
} finally {
if (out != null) {
try {
out.close();
} catch (IOException e) {
}
}
} }
return bos.toByteArray(); return bos.toByteArray();
@ -288,8 +279,8 @@ public class SnapshotDescriptor {
} }
public static class DiskInfo { public static class DiskInfo {
private String _diskFileName; private final String _diskFileName;
private String _deviceName; private final String _deviceName;
public DiskInfo(String diskFileName, String deviceName) { public DiskInfo(String diskFileName, String deviceName) {
_diskFileName = diskFileName; _diskFileName = diskFileName;