Support user resource name / displaytext with emoji, unicode chars, and some sql exception msg improvements (#9340)
* Added API arg validator for RFC compliance domain name, to validate VM's host name
* Added unit tests for vm host/domain name validation
* Don't send sql exception/query from dao to upper layer, log it and send only the error message
* Updated user resources name / displayname(/text) column's charset to utf8mb4 to support emojis / unicode chars
* Check and update char set for affinity group name to utf8mb4, from the data migration in upgrade path
* Added smoke test to check resource name for vm, volume, service & disk offering, template, iso, account(first/lastname)
* Updated resource annotation charset to utf8mb4
* Updated some resources description charset to utf8mb4
* Updated sql stmt with constant
* Updated modify columns char set with idempotent procedure
* Removed delimiter (for creating procedures)
This commit is contained in:
parent 47a6b7011d
commit 161d156af1

@@ -32,4 +32,9 @@ public enum ApiArgValidator {
* Validates if the parameter is an UUID with the method {@link UuidUtils#isUuid(String)}.
*/
UuidString,

/**
* Validates if the parameter is a valid RFC Compliance domain name.
*/
RFCComplianceDomainName,
}

@@ -31,6 +31,7 @@ import org.apache.cloudstack.acl.RoleType;
import org.apache.cloudstack.affinity.AffinityGroupResponse;
import org.apache.cloudstack.api.ACL;
import org.apache.cloudstack.api.APICommand;
import org.apache.cloudstack.api.ApiArgValidator;
import org.apache.cloudstack.api.ApiCommandResourceType;
import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.api.ApiConstants.IoDriverPolicy;
@@ -97,7 +98,7 @@ public class DeployVMCmd extends BaseAsyncCreateCustomIdCmd implements SecurityG
@Parameter(name = ApiConstants.TEMPLATE_ID, type = CommandType.UUID, entityType = TemplateResponse.class, required = true, description = "the ID of the template for the virtual machine")
private Long templateId;

@Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "host name for the virtual machine")
@Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "host name for the virtual machine", validations = {ApiArgValidator.RFCComplianceDomainName})
private String name;

@Parameter(name = ApiConstants.DISPLAY_NAME, type = CommandType.STRING, description = "an optional user generated name for the virtual machine")

@@ -22,6 +22,8 @@ import java.util.List;
import java.util.Map;

import com.cloud.utils.exception.CloudRuntimeException;

import org.apache.cloudstack.api.ApiArgValidator;
import org.apache.cloudstack.api.response.UserDataResponse;

import org.apache.cloudstack.acl.RoleType;
@@ -104,7 +106,7 @@ public class UpdateVMCmd extends BaseCustomIdCmd implements SecurityGroupAction,
description = "true if VM contains XS/VMWare tools inorder to support dynamic scaling of VM cpu/memory. This can be updated only when dynamic scaling is enabled on template, service offering and the corresponding global setting")
protected Boolean isDynamicallyScalable;

@Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "new host name of the vm. The VM has to be stopped/started for this update to take affect", since = "4.4")
@Parameter(name = ApiConstants.NAME, type = CommandType.STRING, description = "new host name of the vm. The VM has to be stopped/started for this update to take affect", validations = {ApiArgValidator.RFCComplianceDomainName}, since = "4.4")
private String name;

@Parameter(name = ApiConstants.INSTANCE_NAME, type = CommandType.STRING, description = "instance name of the user vm", since = "4.4", authorized = {RoleType.Admin})

@@ -18,12 +18,16 @@ package com.cloud.upgrade.dao;

import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import com.cloud.upgrade.SystemVmTemplateRegistration;
import com.cloud.utils.exception.CloudRuntimeException;

public class Upgrade41910to42000 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
private SystemVmTemplateRegistration systemVmTemplateRegistration;
private static final int MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4 = 191;

@Override
public String[] getUpgradableVersionRange() {
@@ -53,6 +57,7 @@ public class Upgrade41910to42000 extends DbUpgradeAbstractImpl implements DbUpgr

@Override
public void performDataMigration(Connection conn) {
checkAndUpdateAffinityGroupNameCharSetToUtf8mb4(conn);
}

@Override
@@ -80,4 +85,32 @@ public class Upgrade41910to42000 extends DbUpgradeAbstractImpl implements DbUpgr
throw new CloudRuntimeException("Failed to find / register SystemVM template(s)");
}
}

private void checkAndUpdateAffinityGroupNameCharSetToUtf8mb4(Connection conn) {
logger.debug("Check and update char set for affinity group name to utf8mb4");
try {
PreparedStatement pstmt = conn.prepareStatement("SELECT MAX(LENGTH(name)) FROM `cloud`.`affinity_group`");
ResultSet rs = pstmt.executeQuery();
if (rs.next()) {
long maxLengthOfName = rs.getLong(1);
if (maxLengthOfName <= MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4) {
pstmt = conn.prepareStatement(String.format("ALTER TABLE `cloud`.`affinity_group` MODIFY `name` VARCHAR(%d) CHARACTER SET utf8mb4 NOT NULL", MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4));
pstmt.executeUpdate();
logger.debug("Successfully updated char set for affinity group name to utf8mb4");
} else {
logger.warn("Unable to update char set for affinity group name, as there are some names with more than " + MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4 +
" chars (max supported chars for index)");
}
}

if (rs != null && !rs.isClosed()) {
rs.close();
}
if (pstmt != null && !pstmt.isClosed()) {
pstmt.close();
}
} catch (final SQLException e) {
logger.warn("Exception while updating char set for affinity group name to utf8mb4: " + e.getMessage());
}
}
}
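
Side note (not part of the patch): 191 is the largest utf8mb4 prefix that still fits MySQL's 767-byte index key limit on older InnoDB row formats (191 * 4 = 764 bytes), which is why the migration only converts the indexed name column when every existing value is that short. In the committed helper above, the PreparedStatement and ResultSet are closed only on the success path; a minimal alternative sketch using try-with-resources, written against the same class members (logger, MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4) and with the "name too long" warning branch omitted for brevity, could look like this:

// Illustrative sketch only, not the committed code.
private void checkAndUpdateAffinityGroupNameCharSetToUtf8mb4(Connection conn) {
    final String checkSql = "SELECT MAX(LENGTH(name)) FROM `cloud`.`affinity_group`";
    final String alterSql = String.format(
            "ALTER TABLE `cloud`.`affinity_group` MODIFY `name` VARCHAR(%d) CHARACTER SET utf8mb4 NOT NULL",
            MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4);
    try (PreparedStatement checkStmt = conn.prepareStatement(checkSql);
         ResultSet rs = checkStmt.executeQuery()) {
        // Convert only when every existing name fits within the indexable utf8mb4 prefix.
        if (rs.next() && rs.getLong(1) <= MAX_INDEXED_CHARS_IN_CHAR_SET_UTF8MB4) {
            try (PreparedStatement alterStmt = conn.prepareStatement(alterSql)) {
                alterStmt.executeUpdate();
            }
        }
    } catch (SQLException e) {
        logger.warn("Exception while updating char set for affinity group name to utf8mb4: " + e.getMessage());
    }
}
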
@@ -151,6 +151,76 @@ WHERE
name IN ("quota.usage.smtp.useStartTLS", "quota.usage.smtp.useAuth", "alert.smtp.useAuth", "project.smtp.useAuth")
AND value NOT IN ("true", "y", "t", "1", "on", "yes");


-- Quota inject tariff result into subsequent ones
CALL `cloud_usage`.`IDEMPOTENT_ADD_COLUMN`('cloud_usage.quota_tariff', 'position', 'bigint(20) NOT NULL DEFAULT 1 COMMENT "Position in the execution sequence for tariffs of the same type"');

-- Idempotent IDEMPOTENT_MODIFY_COLUMN_CHAR_SET
DROP PROCEDURE IF EXISTS `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`;
CREATE PROCEDURE `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET` (
IN in_table_name VARCHAR(200)
, IN in_column_name VARCHAR(200)
, IN in_column_type VARCHAR(200)
, IN in_column_definition VARCHAR(1000)
)
BEGIN
DECLARE CONTINUE HANDLER FOR 1060 BEGIN END; SET @ddl = CONCAT('ALTER TABLE ', in_table_name); SET @ddl = CONCAT(@ddl, ' ', ' MODIFY COLUMN') ; SET @ddl = CONCAT(@ddl, ' ', in_column_name); SET @ddl = CONCAT(@ddl, ' ', in_column_type); SET @ddl = CONCAT(@ddl, ' ', ' CHARACTER SET utf8mb4'); SET @ddl = CONCAT(@ddl, ' ', in_column_definition); PREPARE stmt FROM @ddl; EXECUTE stmt; DEALLOCATE PREPARE stmt; END;

DROP PROCEDURE IF EXISTS `cloud_usage`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`;
CREATE PROCEDURE `cloud_usage`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET` (
IN in_table_name VARCHAR(200)
, IN in_column_name VARCHAR(200)
, IN in_column_type VARCHAR(200)
, IN in_column_definition VARCHAR(1000)
)
BEGIN
DECLARE CONTINUE HANDLER FOR 1060 BEGIN END; SET @ddl = CONCAT('ALTER TABLE ', in_table_name); SET @ddl = CONCAT(@ddl, ' ', ' MODIFY COLUMN') ; SET @ddl = CONCAT(@ddl, ' ', in_column_name); SET @ddl = CONCAT(@ddl, ' ', in_column_type); SET @ddl = CONCAT(@ddl, ' ', ' CHARACTER SET utf8mb4'); SET @ddl = CONCAT(@ddl, ' ', in_column_definition); PREPARE stmt FROM @ddl; EXECUTE stmt; DEALLOCATE PREPARE stmt; END;

CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('async_job', 'job_result', 'TEXT', 'COMMENT \'job result info\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('async_job', 'job_cmd_info', 'TEXT', 'COMMENT \'command parameter info\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('event', 'description', 'VARCHAR(1024)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('usage_event', 'resource_name', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud_usage`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('usage_event', 'resource_name', 'VARCHAR(255)', 'DEFAULT NULL');

CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('account', 'account_name', 'VARCHAR(100)', 'DEFAULT NULL COMMENT \'an account name set by the creator of the account, defaults to username for single accounts\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('affinity_group', 'description', 'VARCHAR(4096)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('annotations', 'annotation', 'TEXT', '');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('autoscale_vmgroups', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'name of the autoscale vm group\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('backup_offering', 'name', 'VARCHAR(255)', 'NOT NULL COMMENT \'backup offering name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('backup_offering', 'description', 'VARCHAR(255)', 'NOT NULL COMMENT \'backup offering description\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('disk_offering', 'name', 'VARCHAR(255)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('disk_offering', 'unique_name', 'VARCHAR(32)', 'DEFAULT NULL COMMENT \'unique name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('disk_offering', 'display_text', 'VARCHAR(4096)', 'DEFAULT NULL COMMENT \'Optional text set by the admin for display purpose only\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('instance_group', 'name', 'VARCHAR(255)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('kubernetes_cluster', 'name', 'VARCHAR(255)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('kubernetes_cluster', 'description', 'VARCHAR(4096)', 'DEFAULT NULL COMMENT \'display text for this Kubernetes cluster\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('kubernetes_supported_version', 'name', 'VARCHAR(255)', 'NOT NULL COMMENT \'the name of this Kubernetes version\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('network_offerings', 'name', 'VARCHAR(64)', 'DEFAULT NULL COMMENT \'name of the network offering\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('network_offerings', 'unique_name', 'VARCHAR(64)', 'DEFAULT NULL COMMENT \'unique name of the network offering\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('network_offerings', 'display_text', 'VARCHAR(255)', 'NOT NULL COMMENT \'text to display to users\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('networks', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'name for this network\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('networks', 'display_text', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'display text for this network\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('project_role', 'description', 'TEXT', 'COMMENT \'description of the project role\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('projects', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'project name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('projects', 'display_text', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'project display text\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('roles', 'description', 'TEXT', 'COMMENT \'description of the role\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('service_offering', 'name', 'VARCHAR(255)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('service_offering', 'unique_name', 'VARCHAR(32)', 'DEFAULT NULL COMMENT \'unique name for offerings\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('service_offering', 'display_text', 'VARCHAR(4096)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('snapshots', 'name', 'VARCHAR(255)', 'NOT NULL COMMENT \'snapshot name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('ssh_keypairs', 'keypair_name', 'VARCHAR(256)', 'NOT NULL COMMENT \'name of the key pair\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('user_vm', 'display_name', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('user_vm_details', 'value', 'VARCHAR(5120)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('user', 'firstname', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('user', 'lastname', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('user_data', 'name', 'VARCHAR(256)', 'NOT NULL COMMENT \'name of the user data\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vm_instance', 'display_name', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vm_snapshots', 'display_name', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vm_snapshots', 'description', 'VARCHAR(255)', 'DEFAULT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vm_template', 'name', 'VARCHAR(255)', 'NOT NULL');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vm_template', 'display_text', 'VARCHAR(4096)', 'DEFAULT NULL COMMENT \'Description text set by the admin for display purpose only\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('volumes', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'A user specified name for the volume\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vpc', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'vpc name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vpc', 'display_text', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'vpc display text\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vpc_offerings', 'name', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'vpc offering name\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vpc_offerings', 'unique_name', 'VARCHAR(64)', 'DEFAULT NULL COMMENT \'unique name of the vpc offering\'');
CALL `cloud`.`IDEMPOTENT_MODIFY_COLUMN_CHAR_SET`('vpc_offerings', 'display_text', 'VARCHAR(255)', 'DEFAULT NULL COMMENT \'display text\'');
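
For readers tracing the upgrade, each CALL above is expanded by the procedure into a single ALTER statement; for example, the `vm_instance` entry should effectively execute ALTER TABLE vm_instance MODIFY COLUMN display_name VARCHAR(255) CHARACTER SET utf8mb4 DEFAULT NULL (statement reconstructed here from the procedure body above, not quoted from the patch).
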
@@ -437,9 +437,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return result;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to find on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -522,9 +524,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone

return results;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to find on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -870,8 +874,9 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
ub.clear();
return result;
} catch (final SQLException e) {
logger.error("DB Exception on: " + pstmt, e);
handleEntityExistsException(e);
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to update on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -1065,7 +1070,8 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
ResultSet rs = pstmt.executeQuery();
return rs.next() ? toEntityBean(rs, true) : null;
} catch (SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to find by id on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -1180,9 +1186,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return result;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to execute on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -1231,7 +1239,8 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return true;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to expunge on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -1269,9 +1278,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return pstmt.executeUpdate();
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to expunge on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}
@Override
@@ -1550,7 +1561,7 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
return entity;
}

assert false : "Can't call persit if you don't have primary key";
assert false : "Can't call persist if you don't have primary key";
}

ID id = null;
@@ -1606,8 +1617,9 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
txn.commit();
} catch (final SQLException e) {
logger.error("DB Exception on: " + pstmt, e);
handleEntityExistsException(e);
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to persist on DB, due to: " + e.getLocalizedMessage());
} catch (IllegalArgumentException e) {
throw new CloudRuntimeException("Problem with getting the ec attribute ", e);
} catch (IllegalAccessException e) {
@@ -1956,7 +1968,8 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
pstmt.executeUpdate();
txn.commit();
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to expunge on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -1984,7 +1997,8 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return result > 0;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to unremove on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -2027,7 +2041,8 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return result > 0;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to remove on DB, due to: " + e.getLocalizedMessage());
}
}

@@ -2175,9 +2190,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return 0;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to get count on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -2234,9 +2251,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return 0;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception in executing: " + sql, e);
logger.error("DB Exception in executing: " + sql, e);
throw new CloudRuntimeException("Unable to get count on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught exception in : " + sql, e);
logger.error("Caught exception in : " + sql, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -2309,9 +2328,11 @@ public abstract class GenericDaoBase<T, ID extends Serializable> extends Compone
}
return 0;
} catch (final SQLException e) {
throw new CloudRuntimeException("DB Exception on: " + pstmt, e);
logger.error("DB Exception on: " + pstmt, e);
throw new CloudRuntimeException("Unable to get count on DB, due to: " + e.getLocalizedMessage());
} catch (final Exception e) {
throw new CloudRuntimeException("Caught: " + pstmt, e);
logger.error("Caught: " + pstmt, e);
throw new CloudRuntimeException("Caught error: " + e.getLocalizedMessage());
}
}

@@ -60,6 +60,7 @@ import com.cloud.utils.DateUtil;
import com.cloud.utils.UuidUtils;
import com.cloud.utils.db.EntityManager;
import com.cloud.utils.exception.CloudRuntimeException;
import com.cloud.utils.net.NetUtils;

public class ParamProcessWorker implements DispatchWorker {

@@ -117,8 +118,21 @@ public class ParamProcessWorker implements DispatchWorker {
}
}

private void validateNameForRFCCompliance(final Object param, final String argName) {
String value = String.valueOf(param);
if (StringUtils.isBlank(value) || !NetUtils.verifyDomainNameLabel(value, true)) {
String msg = "it can contain ASCII letters 'a' through 'z', the digits '0' through '9', "
+ "and the hyphen ('-'), must be between 1 and 63 characters long, and can't start or end with \"-\" and can't start with digit";
throwInvalidParameterValueException(argName, msg);
}
}

protected void throwInvalidParameterValueException(String argName) {
throw new InvalidParameterValueException(String.format("Invalid value provided for API arg: %s", argName));
throwInvalidParameterValueException(argName, null);
}

protected void throwInvalidParameterValueException(String argName, String customMsg) {
throw new InvalidParameterValueException(String.format("Invalid value provided for API arg: %s%s", argName, StringUtils.isBlank(customMsg)? "" : " - " + customMsg));
}

private void validateField(final Object paramObj, final Parameter annotation) throws ServerApiException {
@@ -155,6 +169,12 @@ public class ParamProcessWorker implements DispatchWorker {
break;
}
break;
case RFCComplianceDomainName:
switch (annotation.type()) {
case STRING:
validateNameForRFCCompliance(paramObj, argName);
break;
}
}
}
}
@@ -165,14 +185,18 @@ public class ParamProcessWorker implements DispatchWorker {

final List<Field> cmdFields = cmd.getParamFields();

String commandName = cmd.getCommandName();
if (commandName.endsWith(BaseCmd.RESPONSE_SUFFIX)) {
commandName = cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8);
}

for (final Field field : cmdFields) {
final Parameter parameterAnnotation = field.getAnnotation(Parameter.class);
final Object paramObj = params.get(parameterAnnotation.name());
if (paramObj == null) {
if (parameterAnnotation.required()) {
throw new ServerApiException(ApiErrorCode.PARAM_ERROR, "Unable to execute API command " +
cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8) +
" due to missing parameter " + parameterAnnotation.name());
commandName + " due to missing parameter " + parameterAnnotation.name());
}
continue;
}
@@ -186,29 +210,28 @@ public class ParamProcessWorker implements DispatchWorker {
setFieldValue(field, cmd, paramObj, parameterAnnotation);
} catch (final IllegalArgumentException argEx) {
if (logger.isDebugEnabled()) {
logger.debug("Unable to execute API command " + cmd.getCommandName() + " due to invalid value " + paramObj + " for parameter " +
logger.debug("Unable to execute API command " + commandName + " due to invalid value " + paramObj + " for parameter " +
parameterAnnotation.name());
}
throw new ServerApiException(ApiErrorCode.PARAM_ERROR, "Unable to execute API command " +
cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8) + " due to invalid value " + paramObj + " for parameter " +
commandName + " due to invalid value " + paramObj + " for parameter " +
parameterAnnotation.name());
} catch (final ParseException parseEx) {
if (logger.isDebugEnabled()) {
logger.debug("Invalid date parameter " + paramObj + " passed to command " + cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8));
logger.debug("Invalid date parameter " + paramObj + " passed to command " + commandName);
}
throw new ServerApiException(ApiErrorCode.PARAM_ERROR, "Unable to parse date " + paramObj + " for command " +
cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8) + ", please pass dates in the format mentioned in the api documentation");
commandName + ", please pass dates in the format mentioned in the api documentation");
} catch (final InvalidParameterValueException invEx) {
throw new ServerApiException(ApiErrorCode.PARAM_ERROR, "Unable to execute API command " +
cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8) + " due to invalid value. " + invEx.getMessage());
commandName + " due to invalid value. " + invEx.getMessage());
} catch (final CloudRuntimeException cloudEx) {
logger.error("CloudRuntimeException", cloudEx);
// FIXME: Better error message? This only happens if the API command is not executable, which typically
//means
// there was
// and IllegalAccessException setting one of the parameters.
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Internal error executing API command " +
cmd.getCommandName().substring(0, cmd.getCommandName().length() - 8));
throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Internal error executing API command " + commandName);
}

//check access on the resource this field points to

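The check itself delegates to NetUtils.verifyDomainNameLabel. Purely as a reading aid for the rule quoted in the error message above (ASCII letters, digits and hyphens, 1 to 63 characters, no leading digit and no leading or trailing hyphen), a rough standalone equivalent is sketched below; the class name and regex are illustrative assumptions, not CloudStack's implementation:

// Hypothetical, simplified approximation of the hostname-label rule described above.
// Not the NetUtils.verifyDomainNameLabel implementation used by the patch.
public final class HostNameLabelCheck {
    // Starts with a letter, may continue with letters/digits/hyphens, ends with a letter or digit, 1-63 chars total.
    private static final java.util.regex.Pattern LABEL =
            java.util.regex.Pattern.compile("^[a-zA-Z]([a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?$");

    public static boolean isValidLabel(final String value) {
        return value != null && LABEL.matcher(value).matches();
    }
}

Against the unit tests added below, such a check accepts "test-host-name-123" and rejects "123test", "-test", "test-" and the over-length example.
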
@@ -26,6 +26,7 @@ import com.cloud.exception.ResourceUnavailableException;
import com.cloud.user.Account;
import com.cloud.user.AccountManager;
import com.cloud.user.User;
import org.apache.cloudstack.api.ApiArgValidator;
import org.apache.cloudstack.api.BaseCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.ServerApiException;
@@ -63,6 +64,9 @@ public class ParamProcessWorkerTest {
@Parameter(name = "doubleparam1", type = CommandType.DOUBLE)
double doubleparam1;

@Parameter(name = "vmHostNameParam", type = CommandType.STRING, validations = {ApiArgValidator.RFCComplianceDomainName})
String vmHostNameParam;

@Override
public void execute() throws ResourceUnavailableException, InsufficientCapacityException, ServerApiException, ConcurrentOperationException,
ResourceAllocationException, NetworkRuleConflictException {
@@ -100,11 +104,44 @@ public class ParamProcessWorkerTest {
params.put("intparam1", "100");
params.put("boolparam1", "true");
params.put("doubleparam1", "11.89");
params.put("vmHostNameParam", "test-host-name-123");
final TestCmd cmd = new TestCmd();
paramProcessWorker.processParameters(cmd, params);
Assert.assertEquals("foo", cmd.strparam1);
Assert.assertEquals(100, cmd.intparam1);
Assert.assertTrue(Double.compare(cmd.doubleparam1, 11.89) == 0);
Assert.assertEquals("test-host-name-123", cmd.vmHostNameParam);
}

@Test(expected = ServerApiException.class)
public void processVmHostNameParameter_CannotStartWithDigit() {
final HashMap<String, String> params = new HashMap<String, String>();
params.put("vmHostNameParam", "123test");
final TestCmd cmd = new TestCmd();
paramProcessWorker.processParameters(cmd, params);
}

@Test(expected = ServerApiException.class)
public void processVmHostNameParameter_CannotStartWithHypen() {
final HashMap<String, String> params = new HashMap<String, String>();
params.put("vmHostNameParam", "-test");
final TestCmd cmd = new TestCmd();
paramProcessWorker.processParameters(cmd, params);
}

@Test(expected = ServerApiException.class)
public void processVmHostNameParameter_CannotEndWithHypen() {
final HashMap<String, String> params = new HashMap<String, String>();
params.put("vmHostNameParam", "test-");
final TestCmd cmd = new TestCmd();
paramProcessWorker.processParameters(cmd, params);
}

@Test(expected = ServerApiException.class)
public void processVmHostNameParameter_NotMoreThan63Chars() {
final HashMap<String, String> params = new HashMap<String, String>();
params.put("vmHostNameParam", "test-f2405112-d5a1-47c1-9f00-976909e3a6d3-1e6f3264-955ee76011a99");
final TestCmd cmd = new TestCmd();
paramProcessWorker.processParameters(cmd, params);
}
}

test/integration/smoke/test_resource_names.py (new file, +299 lines)
@@ -0,0 +1,299 @@
# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" BVT tests for resource names with emojis / unicode
"""
from marvin.cloudstackTestCase import cloudstackTestCase

from marvin.lib.base import (Account,
                             ServiceOffering,
                             VirtualMachine,
                             Template,
                             Iso,
                             Volume,
                             DiskOffering)
from marvin.lib.common import (get_domain,
                               get_zone,
                               get_suitable_test_template,
                               get_builtin_template_info)
from marvin.codes import FAILED
from nose.plugins.attrib import attr
# Import System modules
import time

_multiprocess_shared_ = True

class TestResourceNames(cloudstackTestCase):

    @classmethod
    def setUpClass(cls):
        testClient = super(TestResourceNames, cls).getClsTestClient()
        cls.apiclient = testClient.getApiClient()
        cls.services = testClient.getParsedTestDataConfig()
        # Get Zone, Domain and templates
        cls.domain = get_domain(cls.apiclient)
        cls.zone = get_zone(cls.apiclient, testClient.getZoneForTests())
        cls.hypervisor = testClient.getHypervisorInfo()
        cls.services['mode'] = cls.zone.networktype
        cls._cleanup = []

        template = get_suitable_test_template(
            cls.apiclient,
            cls.zone.id,
            cls.services["ostype"],
            cls.hypervisor
        )
        if template == FAILED:
            assert False, "get_suitable_test_template() failed to return template with description %s" % cls.services[
                "ostype"]

        # Set Zones and disk offerings
        cls.services["domainid"] = cls.domain.id
        cls.services["zoneid"] = cls.zone.id
        cls.services["template"] = template.id
        cls.services["iso1"]["zoneid"] = cls.zone.id
        cls.services["small"]["zoneid"] = cls.zone.id
        cls.services["small"]["template"] = template.id

        cls.services["account"]["firstname"] = "test🎉"
        cls.services["account"]["lastname"] = "account🙂"
        cls.account = Account.create(
            cls.apiclient,
            cls.services["account"],
            domainid=cls.domain.id
        )
        cls._cleanup.append(cls.account)

        cls.services["service_offerings"]["tiny"]["name"] = "test🎉svcoffering🙂"
        cls.service_offering = ServiceOffering.create(
            cls.apiclient,
            cls.services["service_offerings"]["tiny"]
        )
        cls._cleanup.append(cls.service_offering)

        cls.services["disk_offering"]["name"] = "test🎉diskoffering🙂"
        cls.disk_offering = DiskOffering.create(
            cls.apiclient,
            cls.services["disk_offering"]
        )
        cls._cleanup.append(cls.disk_offering)

        cls.services["small"]["displayname"] = "test🎉vm🙂"
        cls.virtual_machine = VirtualMachine.create(
            cls.apiclient,
            cls.services["small"],
            accountid=cls.account.name,
            domainid=cls.account.domainid,
            serviceofferingid=cls.service_offering.id,
            mode=cls.services['mode']
        )

    @classmethod
    def tearDownClass(cls):
        super(TestResourceNames, cls).tearDownClass()

    def setUp(self):
        self.apiclient = self.testClient.getApiClient()
        self.dbclient = self.testClient.getDbConnection()
        self.cleanup = []

    def tearDown(self):
        super(TestResourceNames, self).tearDown()

    @attr(tags=["advanced", "smoke", "basic"], required_hardware="false")
    def test_01_deploy_vm(self):
        """Test for deploy virtual machine
        """
        # Validate the following:
        # 1. listVirtualMachines returns accurate information, and check name
        list_vm_response = VirtualMachine.list(
            self.apiclient,
            id=self.virtual_machine.id
        )

        self.debug(
            "Verify listVirtualMachines response for virtual machine: %s" \
            % self.virtual_machine.id
        )
        self.assertEqual(
            isinstance(list_vm_response, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            len(list_vm_response),
            0,
            "Check VM available in List Virtual Machines"
        )

        vm_response = list_vm_response[0]
        self.assertEqual(
            vm_response.id,
            self.virtual_machine.id,
            "Check virtual machine id in listVirtualMachines"
        )
        self.assertEqual(
            vm_response.name,
            self.virtual_machine.name,
            "Check virtual machine name in listVirtualMachines"
        )
        self.assertEqual(
            vm_response.displayname,
            self.virtual_machine.displayname,
            "Check virtual machine display name in listVirtualMachines"
        )
        self.assertEqual(
            vm_response.state,
            'Running',
            msg="VM is not in Running state"
        )
        return

    @attr(tags=["advanced", "smoke", "basic"], required_hardware="true")
    def test_02_create_volume(self):
        """Test for create volume
        """
        # Validate the following:
        # 1. Create volume and check name

        self.services["diskname"] = "test🎉data🙂volume"
        self.volume = Volume.create(
            self.apiclient,
            self.services,
            account=self.account.name,
            domainid=self.account.domainid,
            diskofferingid=self.disk_offering.id
        )
        # self.cleanup.append(self.volume)
        self.virtual_machine.attach_volume(self.apiclient, self.volume)
        list_volume_response = Volume.list(
            self.apiclient,
            id=self.volume.id
        )
        self.assertEqual(
            isinstance(list_volume_response, list),
            True,
            "Check list response returns a valid list"
        )
        self.assertNotEqual(
            list_volume_response,
            None,
            "Check if volume exists in ListVolumes"
        )

        volume_response = list_volume_response[0]
        self.assertNotEqual(
            volume_response.virtualmachineid,
            None,
            "Check if volume state (attached) is reflected"
        )
        self.assertEqual(
            volume_response.name,
            self.volume.name,
            "Check virtual machine display name in listVirtualMachines"
        )

    @attr(tags=["advanced", "smoke", "basic"], required_hardware="true")
    def test_03_register_template(self):
        """Test for register template
        """
        # Validate the following:
        # 1. Register template and check name

        if self.hypervisor.lower() in ["lxc"]:
            self.skipTest("Skipping test, unsupported hypervisor %s" % self.hypervisor)

        builtin_info = get_builtin_template_info(self.apiclient, self.zone.id)
        self.services["template_2"]["url"] = builtin_info[0]
        self.services["template_2"]["hypervisor"] = builtin_info[1]
        self.services["template_2"]["format"] = builtin_info[2]
        self.services["template_2"]["name"] = "test🎉tmpl🙂"
        self.services["template_2"]["displaytext"] = "test🎉tmpl🙂"

        template = Template.register(self.apiclient,
                                     self.services["template_2"],
                                     zoneid=self.zone.id,
                                     account=self.account.name,
                                     domainid=self.account.domainid
                                     )
        self.debug("Successfully registered template with ID: %s" % template.id)
        self.cleanup.append(template)

        # Get template response
        timeout = 600
        list_template_response = None
        while timeout >= 0:
            list_template_response = Template.list(self.apiclient,
                                                   templatefilter=self.services["template_2"]["templatefilter"],
                                                   id=template.id)

            if list_template_response is not None and list_template_response[0].isready:
                break

            time.sleep(30)
            timeout -= 30

        template_response = list_template_response[0]
        self.assertEqual(
            template_response.displaytext,
            template.displaytext,
            "Check template displaytext in response"
        )

    @attr(tags=["advanced", "smoke", "basic"], required_hardware="true")
    def test_04_register_iso(self):
        """Test for register ISO
        """
        # Validate the following:
        # 1. Register ISO and check name

        if self.hypervisor.lower() in ["lxc"]:
            self.skipTest("Skipping test, unsupported hypervisor %s" % self.hypervisor)

        self.services["iso1"]["displaytext"] = "test🎉iso🙂"
        self.services["iso1"]["name"] = "test🎉iso🙂"
        iso = Iso.create(
            self.apiclient,
            self.services["iso1"],
            account=self.account.name,
            domainid=self.account.domainid
        )
        self.debug("Successfully registered ISO with ID: %s" % iso.id)
        self.cleanup.append(iso)

        # Get ISO response
        timeout = 600
        list_iso_response = None
        while timeout >= 0:
            list_iso_response = Iso.list(
                self.apiclient,
                isofilter="self",
                id=iso.id
            )

            if list_iso_response is not None and list_iso_response[0].isready:
                break

            time.sleep(30)
            timeout -= 30

        iso_response = list_iso_response[0]
        self.assertEqual(
            iso_response.displaytext,
            iso.displaytext,
            "Check ISO displaytext in response"
        )
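
As with the other files under test/integration/smoke, this test is meant to be driven by Marvin; with a configured environment, an invocation along the lines of nosetests --with-marvin --marvin-config=<your-marvin-cfg> test/integration/smoke/test_resource_names.py should pick it up, though the exact command depends on the local Marvin/nose setup and is not part of this change.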