usage: Fix whitespace and indentation according to code convention

Replaced all tabs with 4 spaces
This commit is contained in:
Wido den Hollander 2012-09-13 14:36:06 +02:00
parent 7beb045600
commit 5d41523a0b
13 changed files with 618 additions and 618 deletions

View File

@ -78,21 +78,21 @@ import com.cloud.utils.exception.CloudRuntimeException;
@Local(value={UsageManager.class})
public class UsageManagerImpl implements UsageManager, Runnable {
public static final Logger s_logger = Logger.getLogger(UsageManagerImpl.class.getName());
public static final Logger s_logger = Logger.getLogger(UsageManagerImpl.class.getName());
protected static final String DAILY = "DAILY";
protected static final String WEEKLY = "WEEKLY";
protected static final String MONTHLY = "MONTHLY";
protected static final String DAILY = "DAILY";
protected static final String WEEKLY = "WEEKLY";
protected static final String MONTHLY = "MONTHLY";
private static final int HOURLY_TIME = 60;
private static final int DAILY_TIME = 60 * 24;
private static final int THREE_DAYS_IN_MINUTES = 60 * 24 * 3;
private static final int USAGE_AGGREGATION_RANGE_MIN = 10;
private static final int HOURLY_TIME = 60;
private static final int DAILY_TIME = 60 * 24;
private static final int THREE_DAYS_IN_MINUTES = 60 * 24 * 3;
private static final int USAGE_AGGREGATION_RANGE_MIN = 10;
private final ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private final AccountDao m_accountDao = _locator.getDao(AccountDao.class);
private final UserStatisticsDao m_userStatsDao = _locator.getDao(UserStatisticsDao.class);
private final UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private final ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private final AccountDao m_accountDao = _locator.getDao(AccountDao.class);
private final UserStatisticsDao m_userStatsDao = _locator.getDao(UserStatisticsDao.class);
private final UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private final UsageVMInstanceDao m_usageInstanceDao = _locator.getDao(UsageVMInstanceDao.class);
private final UsageIPAddressDao m_usageIPAddressDao = _locator.getDao(UsageIPAddressDao.class);
private final UsageNetworkDao m_usageNetworkDao = _locator.getDao(UsageNetworkDao.class);
@ -108,24 +108,24 @@ public class UsageManagerImpl implements UsageManager, Runnable {
@Inject protected UsageEventDao _usageEventDao;
private String m_version = null;
private String m_name = null;
private final Calendar m_jobExecTime = Calendar.getInstance();
private int m_aggregationDuration = 0;
private int m_sanityCheckInterval = 0;
private String m_name = null;
private final Calendar m_jobExecTime = Calendar.getInstance();
private int m_aggregationDuration = 0;
private int m_sanityCheckInterval = 0;
String m_hostname = null;
int m_pid = 0;
TimeZone m_usageTimezone = TimeZone.getTimeZone("GMT");;
private final GlobalLock m_heartbeatLock = GlobalLock.getInternLock("usage.job.heartbeat.check");
private final ScheduledExecutorService m_executor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Job"));
private final ScheduledExecutorService m_heartbeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-HB"));
private final ScheduledExecutorService m_sanityExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Sanity"));
private Future m_scheduledFuture = null;
private Future m_heartbeat = null;
private Future m_sanity = null;
private final ScheduledExecutorService m_executor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Job"));
private final ScheduledExecutorService m_heartbeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-HB"));
private final ScheduledExecutorService m_sanityExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Sanity"));
private Future m_scheduledFuture = null;
private Future m_heartbeat = null;
private Future m_sanity = null;
protected UsageManagerImpl() {
}
protected UsageManagerImpl() {
}
private void mergeConfigs(Map<String, String> dbParams, Map<String, Object> xmlParams) {
for (Map.Entry<String, Object> param : xmlParams.entrySet()) {
@ -175,7 +175,7 @@ public class UsageManagerImpl implements UsageManager, Runnable {
}
if(aggreagationTimeZone != null && !aggreagationTimeZone.isEmpty()){
m_usageTimezone = TimeZone.getTimeZone(aggreagationTimeZone);
m_usageTimezone = TimeZone.getTimeZone(aggreagationTimeZone);
}
s_logger.debug("Usage stats aggregation time zone: "+aggreagationTimeZone);
@ -225,19 +225,19 @@ public class UsageManagerImpl implements UsageManager, Runnable {
}
m_pid = Integer.parseInt(System.getProperty("pid"));
return true;
}
}
public String getName() {
return m_name;
}
public String getName() {
return m_name;
}
public boolean start() {
if (s_logger.isInfoEnabled()) {
s_logger.info("Starting Usage Manager");
}
public boolean start() {
if (s_logger.isInfoEnabled()) {
s_logger.info("Starting Usage Manager");
}
// use the configured exec time and aggregation duration for scheduling the job
m_scheduledFuture = m_executor.scheduleAtFixedRate(this, m_jobExecTime.getTimeInMillis() - System.currentTimeMillis(), m_aggregationDuration * 60 * 1000, TimeUnit.MILLISECONDS);
// use the configured exec time and aggregation duration for scheduling the job
m_scheduledFuture = m_executor.scheduleAtFixedRate(this, m_jobExecTime.getTimeInMillis() - System.currentTimeMillis(), m_aggregationDuration * 60 * 1000, TimeUnit.MILLISECONDS);
m_heartbeat = m_heartbeatExecutor.scheduleAtFixedRate(new Heartbeat(), /* start in 15 seconds...*/15*1000, /* check database every minute*/60*1000, TimeUnit.MILLISECONDS);
@ -264,81 +264,81 @@ public class UsageManagerImpl implements UsageManager, Runnable {
usageTxn.close();
}
return true;
}
return true;
}
public boolean stop() {
m_heartbeat.cancel(true);
m_scheduledFuture.cancel(true);
if(m_sanity != null){
m_sanity.cancel(true);
}
return true;
}
public boolean stop() {
m_heartbeat.cancel(true);
m_scheduledFuture.cancel(true);
if(m_sanity != null){
m_sanity.cancel(true);
}
return true;
}
public void run() {
if (s_logger.isInfoEnabled()) {
s_logger.info("starting usage job...");
}
public void run() {
if (s_logger.isInfoEnabled()) {
s_logger.info("starting usage job...");
}
// how about we update the job exec time when the job starts???
long execTime = m_jobExecTime.getTimeInMillis();
long now = System.currentTimeMillis() + 2000; // 2 second buffer since jobs can run a little early (though usually just by milliseconds)
// how about we update the job exec time when the job starts???
long execTime = m_jobExecTime.getTimeInMillis();
long now = System.currentTimeMillis() + 2000; // 2 second buffer since jobs can run a little early (though usually just by milliseconds)
if (execTime < now) {
// if exec time is in the past, calculate the next time the job will execute...if this is a one-off job that is a result
// of scheduleParse() then don't update the next exec time...
m_jobExecTime.add(Calendar.MINUTE, m_aggregationDuration);
}
if (execTime < now) {
// if exec time is in the past, calculate the next time the job will execute...if this is a one-off job that is a result
// of scheduleParse() then don't update the next exec time...
m_jobExecTime.add(Calendar.MINUTE, m_aggregationDuration);
}
UsageJobVO job = m_usageJobDao.isOwner(m_hostname, m_pid);
if (job != null) {
// FIXME: we really need to do a better job of not missing any events...so we should some how
// keep track of the last time usage was run, then go from there...
// For executing the job, we treat hourly and daily as special time ranges, using the previous full hour or the previous
// full day. Otherwise we just subtract off the aggregation range from the current time and use that as start date with
// current time as end date.
Calendar cal = Calendar.getInstance(m_usageTimezone);
cal.setTime(new Date());
long startDate = 0;
long endDate = 0;
if (m_aggregationDuration == DAILY_TIME) {
cal.roll(Calendar.DAY_OF_YEAR, false);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
UsageJobVO job = m_usageJobDao.isOwner(m_hostname, m_pid);
if (job != null) {
// FIXME: we really need to do a better job of not missing any events...so we should some how
// keep track of the last time usage was run, then go from there...
// For executing the job, we treat hourly and daily as special time ranges, using the previous full hour or the previous
// full day. Otherwise we just subtract off the aggregation range from the current time and use that as start date with
// current time as end date.
Calendar cal = Calendar.getInstance(m_usageTimezone);
cal.setTime(new Date());
long startDate = 0;
long endDate = 0;
if (m_aggregationDuration == DAILY_TIME) {
cal.roll(Calendar.DAY_OF_YEAR, false);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.DAY_OF_YEAR, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else if (m_aggregationDuration == HOURLY_TIME) {
cal.roll(Calendar.HOUR_OF_DAY, false);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.DAY_OF_YEAR, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else if (m_aggregationDuration == HOURLY_TIME) {
cal.roll(Calendar.HOUR_OF_DAY, false);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MILLISECOND, 0);
startDate = cal.getTime().getTime();
cal.roll(Calendar.HOUR_OF_DAY, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else {
endDate = cal.getTime().getTime(); // current time
cal.add(Calendar.MINUTE, -1*m_aggregationDuration);
startDate = cal.getTime().getTime();
}
cal.roll(Calendar.HOUR_OF_DAY, true);
cal.add(Calendar.MILLISECOND, -1);
endDate = cal.getTime().getTime();
} else {
endDate = cal.getTime().getTime(); // current time
cal.add(Calendar.MINUTE, -1*m_aggregationDuration);
startDate = cal.getTime().getTime();
}
parse(job, startDate, endDate);
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Not owner of usage job, skipping...");
}
}
parse(job, startDate, endDate);
} else {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Not owner of usage job, skipping...");
}
}
if (s_logger.isInfoEnabled()) {
s_logger.info("usage job complete");
}
}
}
public void scheduleParse() {
if (s_logger.isDebugEnabled()) {
@ -347,12 +347,12 @@ public class UsageManagerImpl implements UsageManager, Runnable {
m_executor.schedule(this, 0, TimeUnit.MILLISECONDS);
}
public void parse(UsageJobVO job, long startDateMillis, long endDateMillis) {
public void parse(UsageJobVO job, long startDateMillis, long endDateMillis) {
// TODO: Shouldn't we also allow parsing by the type of usage?
boolean success = false;
long timeStart = System.currentTimeMillis();
long deleteOldStatsTimeMillis = 0L;
boolean success = false;
long timeStart = System.currentTimeMillis();
long deleteOldStatsTimeMillis = 0L;
try {
if ((endDateMillis == 0) || (endDateMillis > timeStart)) {
endDateMillis = timeStart;
@ -532,7 +532,7 @@ public class UsageManagerImpl implements UsageManager, Runnable {
// Keep track of user stats for an account, across all of its public IPs
Map<String, UserStatisticsVO> aggregatedStats = new HashMap<String, UserStatisticsVO>();
int startIndex = 0;
do {
do {
userStats = m_userStatsDao.listActiveAndRecentlyDeleted(recentlyDeletedDate, startIndex, 500);
if (userStats != null) {
@ -557,11 +557,11 @@ public class UsageManagerImpl implements UsageManager, Runnable {
// loop over the user stats, create delta entries in the usage_network helper table
int numAcctsProcessed = 0;
for (String key : aggregatedStats.keySet()) {
UsageNetworkVO currentNetworkStats = null;
UsageNetworkVO currentNetworkStats = null;
if (networkStats != null) {
currentNetworkStats = networkStats.get(key);
}
createNetworkHelperEntry(aggregatedStats.get(key), currentNetworkStats, endDateMillis);
numAcctsProcessed++;
}
@ -684,15 +684,15 @@ public class UsageManagerImpl implements UsageManager, Runnable {
swap.close();
}
} catch (Exception e) {
s_logger.error("Usage Manager error", e);
}
}
private boolean parseHelperTables(AccountVO account, Date currentStartDate, Date currentEndDate){
boolean parsed = false;
} catch (Exception e) {
s_logger.error("Usage Manager error", e);
}
}
private boolean parseHelperTables(AccountVO account, Date currentStartDate, Date currentEndDate){
boolean parsed = false;
parsed = VMInstanceUsageParser.parse(account, currentStartDate, currentEndDate);
parsed = VMInstanceUsageParser.parse(account, currentStartDate, currentEndDate);
if (s_logger.isDebugEnabled()) {
if (!parsed) {
s_logger.debug("vm usage instances successfully parsed? " + parsed + " (for account: " + account.getAccountName() + ", id: " + account.getId() + ")");
@ -761,25 +761,25 @@ public class UsageManagerImpl implements UsageManager, Runnable {
}
}
return parsed;
}
}
private void createHelperRecord(UsageEventVO event) {
String eventType = event.getType();
if (isVMEvent(eventType)) {
createVMHelperEvent(event);
} else if (isIPEvent(eventType)) {
createIPHelperEvent(event);
} else if (isVolumeEvent(eventType)) {
createVolumeHelperEvent(event);
} else if (isTemplateEvent(eventType)) {
createTemplateHelperEvent(event);
} else if (isISOEvent(eventType)) {
createISOHelperEvent(event);
} else if (isSnapshotEvent(eventType)) {
private void createHelperRecord(UsageEventVO event) {
String eventType = event.getType();
if (isVMEvent(eventType)) {
createVMHelperEvent(event);
} else if (isIPEvent(eventType)) {
createIPHelperEvent(event);
} else if (isVolumeEvent(eventType)) {
createVolumeHelperEvent(event);
} else if (isTemplateEvent(eventType)) {
createTemplateHelperEvent(event);
} else if (isISOEvent(eventType)) {
createISOHelperEvent(event);
} else if (isSnapshotEvent(eventType)) {
createSnapshotHelperEvent(event);
} else if (isLoadBalancerEvent(eventType)) {
createLoadBalancerHelperEvent(event);
} else if (isPortForwardingEvent(eventType)) {
} else if (isLoadBalancerEvent(eventType)) {
createLoadBalancerHelperEvent(event);
} else if (isPortForwardingEvent(eventType)) {
createPortForwardingHelperEvent(event);
} else if (isNetworkOfferingEvent(eventType)) {
createNetworkOfferingEvent(event);
@ -788,12 +788,12 @@ public class UsageManagerImpl implements UsageManager, Runnable {
} else if (isSecurityGroupEvent(eventType)) {
createSecurityGroupEvent(event);
}
}
}
private boolean isVMEvent(String eventType) {
if (eventType == null) return false;
return eventType.startsWith("VM.");
}
private boolean isVMEvent(String eventType) {
if (eventType == null) return false;
return eventType.startsWith("VM.");
}
private boolean isIPEvent(String eventType) {
if (eventType == null) return false;
@ -1085,21 +1085,21 @@ public class UsageManagerImpl implements UsageManager, Runnable {
UsageVolumeVO volumeVO = new UsageVolumeVO(volId, zoneId, event.getAccountId(), acct.getDomainId(), doId, templateId, size, event.getCreateDate(), null);
m_usageVolumeDao.persist(volumeVO);
} else if (EventTypes.EVENT_VOLUME_DELETE.equals(event.getType())) {
SearchCriteria<UsageVolumeVO> sc = m_usageVolumeDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, volId);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageVolumeVO> volumesVOs = m_usageVolumeDao.search(sc, null);
if (volumesVOs.size() > 1) {
s_logger.warn("More that one usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageVolumeVO volumesVO : volumesVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId());
}
volumesVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageVolumeDao.update(volumesVO);
}
SearchCriteria<UsageVolumeVO> sc = m_usageVolumeDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, volId);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageVolumeVO> volumesVOs = m_usageVolumeDao.search(sc, null);
if (volumesVOs.size() > 1) {
s_logger.warn("More that one usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageVolumeVO volumesVO : volumesVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId());
}
volumesVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageVolumeDao.update(volumesVO);
}
}
}
@ -1129,74 +1129,74 @@ public class UsageManagerImpl implements UsageManager, Runnable {
}
List<UsageStorageVO> storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), templateId, StorageTypes.TEMPLATE, zoneId);
if (storageVOs.size() > 0) {
s_logger.warn("Usage entry for Template: " + templateId + " assigned to account: " + event.getAccountId() + "already exists in zone "+zoneId);
return;
}
s_logger.warn("Usage entry for Template: " + templateId + " assigned to account: " + event.getAccountId() + "already exists in zone "+zoneId);
return;
}
Account acct = m_accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageStorageVO storageVO = new UsageStorageVO(templateId, zoneId, event.getAccountId(), acct.getDomainId(), StorageTypes.TEMPLATE, event.getTemplateId(),
templateSize, event.getCreateDate(), null);
templateSize, event.getCreateDate(), null);
m_usageStorageDao.persist(storageVO);
} else if (EventTypes.EVENT_TEMPLATE_DELETE.equals(event.getType())) {
List<UsageStorageVO> storageVOs;
if(zoneId != -1L){
storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), templateId, StorageTypes.TEMPLATE, zoneId);
storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), templateId, StorageTypes.TEMPLATE, zoneId);
} else {
storageVOs = m_usageStorageDao.listById(event.getAccountId(), templateId, StorageTypes.TEMPLATE);
}
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + templateId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting template: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageStorageDao.update(storageVO);
}
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + templateId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting template: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageStorageDao.update(storageVO);
}
}
}
private void createISOHelperEvent(UsageEventVO event) {
long isoSize = -1L;
long isoSize = -1L;
long isoId = event.getResourceId();
long zoneId = event.getZoneId();
if (EventTypes.EVENT_ISO_CREATE.equals(event.getType()) || EventTypes.EVENT_ISO_COPY.equals(event.getType())) {
isoSize = event.getSize();
}
long isoId = event.getResourceId();
long zoneId = event.getZoneId();
if (EventTypes.EVENT_ISO_CREATE.equals(event.getType()) || EventTypes.EVENT_ISO_COPY.equals(event.getType())) {
isoSize = event.getSize();
}
if (EventTypes.EVENT_ISO_CREATE.equals(event.getType()) || EventTypes.EVENT_ISO_COPY.equals(event.getType())) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("create iso with id : " + isoId + " for account: " + event.getAccountId());
}
List<UsageStorageVO> storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), isoId, StorageTypes.ISO, zoneId);
if (EventTypes.EVENT_ISO_CREATE.equals(event.getType()) || EventTypes.EVENT_ISO_COPY.equals(event.getType())) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("create iso with id : " + isoId + " for account: " + event.getAccountId());
}
List<UsageStorageVO> storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), isoId, StorageTypes.ISO, zoneId);
if (storageVOs.size() > 0) {
s_logger.warn("Usage entry for ISO: " + isoId + " assigned to account: " + event.getAccountId() + "already exists in zone "+zoneId);
return;
}
Account acct = m_accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageStorageVO storageVO = new UsageStorageVO( isoId, zoneId, event.getAccountId(), acct.getDomainId(), StorageTypes.ISO, null,
isoSize, event.getCreateDate(), null);
m_usageStorageDao.persist(storageVO);
} else if (EventTypes.EVENT_ISO_DELETE.equals(event.getType())) {
List<UsageStorageVO> storageVOs;
s_logger.warn("Usage entry for ISO: " + isoId + " assigned to account: " + event.getAccountId() + "already exists in zone "+zoneId);
return;
}
Account acct = m_accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageStorageVO storageVO = new UsageStorageVO( isoId, zoneId, event.getAccountId(), acct.getDomainId(), StorageTypes.ISO, null,
isoSize, event.getCreateDate(), null);
m_usageStorageDao.persist(storageVO);
} else if (EventTypes.EVENT_ISO_DELETE.equals(event.getType())) {
List<UsageStorageVO> storageVOs;
if(zoneId != -1L){
storageVOs = m_usageStorageDao.listByIdAndZone(event.getAccountId(), isoId, StorageTypes.ISO, zoneId);
} else {
storageVOs = m_usageStorageDao.listById(event.getAccountId(), isoId, StorageTypes.ISO);
}
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + isoId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting iso: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageStorageDao.update(storageVO);
}
}
if (storageVOs.size() > 1) {
s_logger.warn("More that one usage entry for storage: " + isoId + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageStorageVO storageVO : storageVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting iso: " + storageVO.getId() + " from account: " + storageVO.getAccountId());
}
storageVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageStorageDao.update(storageVO);
}
}
}
private void createSnapshotHelperEvent(UsageEventVO event) {
@ -1234,36 +1234,36 @@ public class UsageManagerImpl implements UsageManager, Runnable {
private void createLoadBalancerHelperEvent(UsageEventVO event) {
long zoneId = -1L;
long zoneId = -1L;
long id = event.getResourceId();
long id = event.getResourceId();
if (EventTypes.EVENT_LOAD_BALANCER_CREATE.equals(event.getType())) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Creating load balancer : " + id + " for account: " + event.getAccountId());
}
zoneId = event.getZoneId();
Account acct = m_accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageLoadBalancerPolicyVO lbVO = new UsageLoadBalancerPolicyVO(id, zoneId, event.getAccountId(), acct.getDomainId(),
event.getCreateDate(), null);
m_usageLoadBalancerPolicyDao.persist(lbVO);
} else if (EventTypes.EVENT_LOAD_BALANCER_DELETE.equals(event.getType())) {
SearchCriteria<UsageLoadBalancerPolicyVO> sc = m_usageLoadBalancerPolicyDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, id);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageLoadBalancerPolicyVO> lbVOs = m_usageLoadBalancerPolicyDao.search(sc, null);
if (lbVOs.size() > 1) {
s_logger.warn("More that one usage entry for load balancer policy: " + id + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageLoadBalancerPolicyVO lbVO : lbVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting load balancer policy: " + lbVO.getId() + " from account: " + lbVO.getAccountId());
}
lbVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageLoadBalancerPolicyDao.update(lbVO);
}
}
if (EventTypes.EVENT_LOAD_BALANCER_CREATE.equals(event.getType())) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Creating load balancer : " + id + " for account: " + event.getAccountId());
}
zoneId = event.getZoneId();
Account acct = m_accountDao.findByIdIncludingRemoved(event.getAccountId());
UsageLoadBalancerPolicyVO lbVO = new UsageLoadBalancerPolicyVO(id, zoneId, event.getAccountId(), acct.getDomainId(),
event.getCreateDate(), null);
m_usageLoadBalancerPolicyDao.persist(lbVO);
} else if (EventTypes.EVENT_LOAD_BALANCER_DELETE.equals(event.getType())) {
SearchCriteria<UsageLoadBalancerPolicyVO> sc = m_usageLoadBalancerPolicyDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, id);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageLoadBalancerPolicyVO> lbVOs = m_usageLoadBalancerPolicyDao.search(sc, null);
if (lbVOs.size() > 1) {
s_logger.warn("More that one usage entry for load balancer policy: " + id + " assigned to account: " + event.getAccountId() + "; marking them all as deleted...");
}
for (UsageLoadBalancerPolicyVO lbVO : lbVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting load balancer policy: " + lbVO.getId() + " from account: " + lbVO.getAccountId());
}
lbVO.setDeleted(event.getCreateDate()); // there really shouldn't be more than one
m_usageLoadBalancerPolicyDao.update(lbVO);
}
}
}
private void createPortForwardingHelperEvent(UsageEventVO event) {

View File

@ -67,7 +67,7 @@ public class UsageSanityChecker {
* Check for Vm usage records which are created after the vm is destroyed
*/
PreparedStatement pstmt = conn.prepareStatement("select count(*) from cloud_usage.cloud_usage cu inner join cloud.vm_instance vm where vm.type = 'User' " +
"and cu.usage_type in (1 , 2) and cu.usage_id = vm.id and cu.start_date > vm.removed"+lastCheckId);
"and cu.usage_type in (1 , 2) and cu.usage_id = vm.id and cu.start_date > vm.removed"+lastCheckId);
ResultSet rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" Vm usage records which are created after Vm is destroyed");
@ -79,7 +79,7 @@ public class UsageSanityChecker {
* Check for Vms which have multiple running vm records in helper table
*/
pstmt = conn.prepareStatement("select sum(cnt) from (select count(*) as cnt from cloud_usage.usage_vm_instance where usage_type =1 " +
"and end_date is null group by vm_instance_id having count(vm_instance_id) > 1) c ;");
"and end_date is null group by vm_instance_id having count(vm_instance_id) > 1) c ;");
rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" duplicate running Vm entries in vm usage helper table");
@ -103,7 +103,7 @@ public class UsageSanityChecker {
* Check for Vms which have running vm entry without allocated vm entry in helper table
*/
pstmt = conn.prepareStatement("select count(vm_instance_id) from cloud_usage.usage_vm_instance o where o.end_date is null and o.usage_type=1 and not exists " +
"(select 1 from cloud_usage.usage_vm_instance i where i.vm_instance_id=o.vm_instance_id and usage_type=2 and i.end_date is null)");
"(select 1 from cloud_usage.usage_vm_instance i where i.vm_instance_id=o.vm_instance_id and usage_type=2 and i.end_date is null)");
rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" running Vm entries without corresponding allocated entries in vm usage helper table");
@ -119,7 +119,7 @@ public class UsageSanityChecker {
* Check for Volume usage records which are created after the volume is removed
*/
PreparedStatement pstmt = conn.prepareStatement("select count(*) from cloud_usage.cloud_usage cu inner join cloud.volumes v " +
"where cu.usage_type = 6 and cu.usage_id = v.id and cu.start_date > v.removed"+lastCheckId);
"where cu.usage_type = 6 and cu.usage_id = v.id and cu.start_date > v.removed"+lastCheckId);
ResultSet rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" volume usage records which are created after volume is removed");
@ -131,7 +131,7 @@ public class UsageSanityChecker {
* Check for duplicate records in volume usage helper table
*/
pstmt = conn.prepareStatement("select sum(cnt) from (select count(*) as cnt from cloud_usage.usage_volume " +
"where deleted is null group by id having count(id) > 1) c;");
"where deleted is null group by id having count(id) > 1) c;");
rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" duplicate records is volume usage helper table");
@ -146,7 +146,7 @@ public class UsageSanityChecker {
* Check for Template/ISO usage records which are created after it is removed
*/
PreparedStatement pstmt = conn.prepareStatement("select count(*) from cloud_usage.cloud_usage cu inner join cloud.template_zone_ref tzr " +
"where cu.usage_id = tzr.template_id and cu.zone_id = tzr.zone_id and cu.usage_type in (7,8) and cu.start_date > tzr.removed"+lastCheckId);
"where cu.usage_id = tzr.template_id and cu.zone_id = tzr.zone_id and cu.usage_type in (7,8) and cu.start_date > tzr.removed"+lastCheckId);
ResultSet rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" template/ISO usage records which are created after it is removed");
@ -161,7 +161,7 @@ public class UsageSanityChecker {
* Check for snapshot usage records which are created after snapshot is removed
*/
PreparedStatement pstmt = conn.prepareStatement("select count(*) from cloud_usage.cloud_usage cu inner join cloud.snapshots s " +
"where cu.usage_id = s.id and cu.usage_type = 9 and cu.start_date > s.removed"+lastCheckId);
"where cu.usage_id = s.id and cu.usage_type = 9 and cu.start_date > s.removed"+lastCheckId);
ResultSet rs = pstmt.executeQuery();
if(rs.next() && (rs.getInt(1) > 0)){
errors.append("Error: Found "+rs.getInt(1)+" snapshot usage records which are created after snapshot is removed");

View File

@ -138,7 +138,7 @@ public class IPAddressUsageParser {
// Create the usage record
UsageVO usageRecord = new UsageVO(zoneId, account.getAccountId(), account.getDomainId(), usageDesc, usageDisplay + " Hrs", UsageTypes.IP_ADDRESS, new Double(usage), IpId,
(isSystem?1:0), (isSourceNat?"SourceNat":""), startDate, endDate);
(isSystem?1:0), (isSourceNat?"SourceNat":""), startDate, endDate);
m_usageDao.persist(usageRecord);
}

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class LoadBalancerUsageParser {
public static final Logger s_logger = Logger.getLogger(LoadBalancerUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageLoadBalancerPolicyDao m_usageLoadBalancerPolicyDao = _locator.getDao(UsageLoadBalancerPolicyDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all LoadBalancerPolicy usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(LoadBalancerUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageLoadBalancerPolicyDao m_usageLoadBalancerPolicyDao = _locator.getDao(UsageLoadBalancerPolicyDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all LoadBalancerPolicy usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -57,15 +57,15 @@ public class LoadBalancerUsageParser {
List<UsageLoadBalancerPolicyVO> usageLBs = m_usageLoadBalancerPolicyDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageLBs.isEmpty()){
s_logger.debug("No load balancer usage events for this period");
return true;
s_logger.debug("No load balancer usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
Map<String, LBInfo> lbMap = new HashMap<String, LBInfo>();
// loop through all the load balancer policies, create a usage record for each
// loop through all the load balancer policies, create a usage record for each
for (UsageLoadBalancerPolicyVO usageLB : usageLBs) {
long lbId = usageLB.getId();
String key = ""+lbId;
@ -76,12 +76,12 @@ public class LoadBalancerUsageParser {
Date lbDeleteDate = usageLB.getDeleted();
if ((lbDeleteDate == null) || lbDeleteDate.after(endDate)) {
lbDeleteDate = endDate;
lbDeleteDate = endDate;
}
// clip the start date to the beginning of our aggregation range if the vm has been running for a while
if (lbCreateDate.before(startDate)) {
lbCreateDate = startDate;
lbCreateDate = startDate;
}
long currentDuration = (lbDeleteDate.getTime() - lbCreateDate.getTime()) + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to find total number of millis to charge)
@ -96,27 +96,27 @@ public class LoadBalancerUsageParser {
// Only create a usage record if we have a runningTime of bigger than zero.
if (useTime > 0L) {
LBInfo info = lbMap.get(lbIdKey);
LBInfo info = lbMap.get(lbIdKey);
createUsageRecord(UsageTypes.LOAD_BALANCER_POLICY, useTime, startDate, endDate, account, info.getId(), info.getZoneId() );
}
}
return true;
}
}
private static void updateLBUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long lbId, long duration) {
private static void updateLBUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long lbId, long duration) {
Pair<Long, Long> lbUsageInfo = usageDataMap.get(key);
if (lbUsageInfo == null) {
lbUsageInfo = new Pair<Long, Long>(new Long(lbId), new Long(duration));
lbUsageInfo = new Pair<Long, Long>(new Long(lbId), new Long(duration));
} else {
Long runningTime = lbUsageInfo.second();
runningTime = new Long(runningTime.longValue() + duration);
lbUsageInfo = new Pair<Long, Long>(lbUsageInfo.first(), runningTime);
}
usageDataMap.put(key, lbUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long lbId, long zoneId) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long lbId, long zoneId) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -139,21 +139,21 @@ public class LoadBalancerUsageParser {
new Double(usage), null, null, null, null, lbId, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
private static class LBInfo {
private long id;
private long zoneId;
private static class LBInfo {
private long id;
private long zoneId;
public LBInfo(long id, long zoneId) {
this.id = id;
this.zoneId = zoneId;
}
public long getZoneId() {
return zoneId;
}
public long getId() {
return id;
}
}
public LBInfo(long id, long zoneId) {
this.id = id;
this.zoneId = zoneId;
}
public long getZoneId() {
return zoneId;
}
public long getId() {
return id;
}
}
}

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class NetworkOfferingUsageParser {
public static final Logger s_logger = Logger.getLogger(NetworkOfferingUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageNetworkOfferingDao m_usageNetworkOfferingDao = _locator.getDao(UsageNetworkOfferingDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all NetworkOffering usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(NetworkOfferingUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageNetworkOfferingDao m_usageNetworkOfferingDao = _locator.getDao(UsageNetworkOfferingDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all NetworkOffering usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -57,15 +57,15 @@ public class NetworkOfferingUsageParser {
List<UsageNetworkOfferingVO> usageNOs = m_usageNetworkOfferingDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageNOs.isEmpty()){
s_logger.debug("No NetworkOffering usage events for this period");
return true;
s_logger.debug("No NetworkOffering usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
Map<String, NOInfo> noMap = new HashMap<String, NOInfo>();
// loop through all the network offerings, create a usage record for each
// loop through all the network offerings, create a usage record for each
for (UsageNetworkOfferingVO usageNO : usageNOs) {
long vmId = usageNO.getVmInstanceId();
long noId = usageNO.getNetworkOfferingId();
@ -103,9 +103,9 @@ public class NetworkOfferingUsageParser {
}
return true;
}
}
private static void updateNOUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long vmId, long duration) {
private static void updateNOUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long vmId, long duration) {
Pair<Long, Long> noUsageInfo = usageDataMap.get(key);
if (noUsageInfo == null) {
noUsageInfo = new Pair<Long, Long>(new Long(vmId), new Long(duration));
@ -115,9 +115,9 @@ public class NetworkOfferingUsageParser {
noUsageInfo = new Pair<Long, Long>(noUsageInfo.first(), runningTime);
}
usageDataMap.put(key, noUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, long noId, long zoneId, boolean isDefault) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, long noId, long zoneId, boolean isDefault) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -140,32 +140,32 @@ public class NetworkOfferingUsageParser {
new Double(usage), vmId, null, noId, null, defaultNic, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
private static class NOInfo {
private long vmId;
private long zoneId;
private long noId;
private boolean isDefault;
private static class NOInfo {
private long vmId;
private long zoneId;
private long noId;
private boolean isDefault;
public NOInfo(long vmId, long zoneId, long noId, boolean isDefault) {
this.vmId = vmId;
this.zoneId = zoneId;
this.noId = noId;
this.isDefault = isDefault;
}
public long getZoneId() {
return zoneId;
}
public long getVmId() {
return vmId;
}
public long getNOId() {
return noId;
}
public boolean isDefault(){
return isDefault;
}
}
public NOInfo(long vmId, long zoneId, long noId, boolean isDefault) {
this.vmId = vmId;
this.zoneId = zoneId;
this.noId = noId;
this.isDefault = isDefault;
}
public long getZoneId() {
return zoneId;
}
public long getVmId() {
return vmId;
}
public long getNOId() {
return noId;
}
public boolean isDefault(){
return isDefault;
}
}
}

View File

@ -37,18 +37,18 @@ import com.cloud.utils.db.SearchCriteria;
public class NetworkUsageParser {
public static final Logger s_logger = Logger.getLogger(NetworkUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageNetworkDao m_usageNetworkDao = _locator.getDao(UsageNetworkDao.class);
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageNetworkDao m_usageNetworkDao = _locator.getDao(UsageNetworkDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Network usage events for account: " + account.getId());
}
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Network usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_network table for all entries for userId with
// event_date in the given range
@ -117,10 +117,10 @@ public static final Logger s_logger = Logger.getLogger(NetworkUsageParser.class.
}
}
return true;
}
private static class NetworkInfo {
return true;
}
private static class NetworkInfo {
private long zoneId;
private long hostId;
private String hostType;

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class PortForwardingUsageParser {
public static final Logger s_logger = Logger.getLogger(PortForwardingUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsagePortForwardingRuleDao m_usagePFRuleDao = _locator.getDao(UsagePortForwardingRuleDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all PortForwardingRule usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(PortForwardingUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsagePortForwardingRuleDao m_usagePFRuleDao = _locator.getDao(UsagePortForwardingRuleDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all PortForwardingRule usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -57,15 +57,15 @@ public class PortForwardingUsageParser {
List<UsagePortForwardingRuleVO> usagePFs = m_usagePFRuleDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usagePFs.isEmpty()){
s_logger.debug("No port forwarding usage events for this period");
return true;
s_logger.debug("No port forwarding usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
Map<String, PFInfo> pfMap = new HashMap<String, PFInfo>();
// loop through all the port forwarding rule, create a usage record for each
// loop through all the port forwarding rule, create a usage record for each
for (UsagePortForwardingRuleVO usagePF : usagePFs) {
long pfId = usagePF.getId();
String key = ""+pfId;
@ -102,9 +102,9 @@ public class PortForwardingUsageParser {
}
return true;
}
}
private static void updatePFUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long pfId, long duration) {
private static void updatePFUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long pfId, long duration) {
Pair<Long, Long> pfUsageInfo = usageDataMap.get(key);
if (pfUsageInfo == null) {
pfUsageInfo = new Pair<Long, Long>(new Long(pfId), new Long(duration));
@ -114,9 +114,9 @@ public class PortForwardingUsageParser {
pfUsageInfo = new Pair<Long, Long>(pfUsageInfo.first(), runningTime);
}
usageDataMap.put(key, pfUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long pfId, long zoneId) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long pfId, long zoneId) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -139,21 +139,21 @@ public class PortForwardingUsageParser {
new Double(usage), null, null, null, null, pfId, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
private static class PFInfo {
private long id;
private long zoneId;
private static class PFInfo {
private long id;
private long zoneId;
public PFInfo(long id, long zoneId) {
this.id = id;
this.zoneId = zoneId;
}
public long getZoneId() {
return zoneId;
}
public long getId() {
return id;
}
}
public PFInfo(long id, long zoneId) {
this.id = id;
this.zoneId = zoneId;
}
public long getZoneId() {
return zoneId;
}
public long getId() {
return id;
}
}
}

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class SecurityGroupUsageParser {
public static final Logger s_logger = Logger.getLogger(SecurityGroupUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageSecurityGroupDao m_usageSecurityGroupDao = _locator.getDao(UsageSecurityGroupDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all SecurityGroup usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(SecurityGroupUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageSecurityGroupDao m_usageSecurityGroupDao = _locator.getDao(UsageSecurityGroupDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all SecurityGroup usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -57,15 +57,15 @@ public class SecurityGroupUsageParser {
List<UsageSecurityGroupVO> usageSGs = m_usageSecurityGroupDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageSGs.isEmpty()){
s_logger.debug("No SecurityGroup usage events for this period");
return true;
s_logger.debug("No SecurityGroup usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
Map<String, SGInfo> sgMap = new HashMap<String, SGInfo>();
// loop through all the security groups, create a usage record for each
// loop through all the security groups, create a usage record for each
for (UsageSecurityGroupVO usageSG : usageSGs) {
long vmId = usageSG.getVmInstanceId();
long sgId = usageSG.getSecurityGroupId();
@ -103,9 +103,9 @@ public class SecurityGroupUsageParser {
}
return true;
}
}
private static void updateSGUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long vmId, long duration) {
private static void updateSGUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long vmId, long duration) {
Pair<Long, Long> sgUsageInfo = usageDataMap.get(key);
if (sgUsageInfo == null) {
sgUsageInfo = new Pair<Long, Long>(new Long(vmId), new Long(duration));
@ -115,9 +115,9 @@ public class SecurityGroupUsageParser {
sgUsageInfo = new Pair<Long, Long>(sgUsageInfo.first(), runningTime);
}
usageDataMap.put(key, sgUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, long sgId, long zoneId) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, long sgId, long zoneId) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -139,26 +139,26 @@ public class SecurityGroupUsageParser {
new Double(usage), vmId, null, null, null, sgId, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
private static class SGInfo {
private long vmId;
private long zoneId;
private long sgId;
private static class SGInfo {
private long vmId;
private long zoneId;
private long sgId;
public SGInfo(long vmId, long zoneId, long sgId) {
this.vmId = vmId;
this.zoneId = zoneId;
this.sgId = sgId;
}
public long getZoneId() {
return zoneId;
}
public long getVmId() {
return vmId;
}
public long getSGId() {
return sgId;
}
}
public SGInfo(long vmId, long zoneId, long sgId) {
this.vmId = vmId;
this.zoneId = zoneId;
this.sgId = sgId;
}
public long getZoneId() {
return zoneId;
}
public long getVmId() {
return vmId;
}
public long getSGId() {
return sgId;
}
}
}

View File

@ -36,19 +36,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class StorageUsageParser {
public static final Logger s_logger = Logger.getLogger(StorageUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageStorageDao m_usageStorageDao = _locator.getDao(UsageStorageDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Storage usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(StorageUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageStorageDao m_usageStorageDao = _locator.getDao(UsageStorageDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Storage usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -58,8 +58,8 @@ public class StorageUsageParser {
List<UsageStorageVO> usageUsageStorages = m_usageStorageDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageUsageStorages.isEmpty()){
s_logger.debug("No Storage usage events for this period");
return true;
s_logger.debug("No Storage usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
@ -67,7 +67,7 @@ public class StorageUsageParser {
Map<String, StorageInfo> storageMap = new HashMap<String, StorageInfo>();
// loop through all the usage volumes, create a usage record for each
// loop through all the usage volumes, create a usage record for each
for (UsageStorageVO usageStorage : usageUsageStorages) {
long storageId = usageStorage.getId();
int storage_type = usageStorage.getStorageType();
@ -84,12 +84,12 @@ public class StorageUsageParser {
Date storageDeleteDate = usageStorage.getDeleted();
if ((storageDeleteDate == null) || storageDeleteDate.after(endDate)) {
storageDeleteDate = endDate;
storageDeleteDate = endDate;
}
// clip the start date to the beginning of our aggregation range if the vm has been running for a while
if (storageCreateDate.before(startDate)) {
storageCreateDate = startDate;
storageCreateDate = startDate;
}
long currentDuration = (storageDeleteDate.getTime() - storageCreateDate.getTime()) + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to find total number of millis to charge)
@ -109,21 +109,21 @@ public class StorageUsageParser {
}
return true;
}
}
private static void updateStorageUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long storageId, long duration) {
private static void updateStorageUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long storageId, long duration) {
Pair<Long, Long> volUsageInfo = usageDataMap.get(key);
if (volUsageInfo == null) {
volUsageInfo = new Pair<Long, Long>(new Long(storageId), new Long(duration));
volUsageInfo = new Pair<Long, Long>(new Long(storageId), new Long(duration));
} else {
Long runningTime = volUsageInfo.second();
runningTime = new Long(runningTime.longValue() + duration);
volUsageInfo = new Pair<Long, Long>(volUsageInfo.first(), runningTime);
}
usageDataMap.put(key, volUsageInfo);
}
}
private static void createUsageRecord(long zoneId, int type, long runningTime, Date startDate, Date endDate, AccountVO account, long storageId, Long sourceId, long size) {
private static void createUsageRecord(long zoneId, int type, long runningTime, Date startDate, Date endDate, AccountVO account, long storageId, Long sourceId, long size) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -143,19 +143,19 @@ public class StorageUsageParser {
int usage_type = 0;
switch(type){
case StorageTypes.TEMPLATE:
usage_type = UsageTypes.TEMPLATE;
usageDesc += "Template ";
tmplSourceId = sourceId;
break;
case StorageTypes.ISO:
usage_type = UsageTypes.ISO;
usageDesc += "ISO ";
break;
case StorageTypes.SNAPSHOT:
usage_type = UsageTypes.SNAPSHOT;
usageDesc += "Snapshot ";
break;
case StorageTypes.TEMPLATE:
usage_type = UsageTypes.TEMPLATE;
usageDesc += "Template ";
tmplSourceId = sourceId;
break;
case StorageTypes.ISO:
usage_type = UsageTypes.ISO;
usageDesc += "ISO ";
break;
case StorageTypes.SNAPSHOT:
usage_type = UsageTypes.SNAPSHOT;
usageDesc += "Snapshot ";
break;
}
// Create the usage record
usageDesc += "Id:"+storageId+" Size:"+size;
@ -166,40 +166,40 @@ public class StorageUsageParser {
m_usageDao.persist(usageRecord);
}
private static class StorageInfo {
private long zoneId;
private long storageId;
private int storageType;
private Long sourceId;
private long size;
private static class StorageInfo {
private long zoneId;
private long storageId;
private int storageType;
private Long sourceId;
private long size;
public StorageInfo(long zoneId, long storageId, int storageType, Long sourceId, long size) {
this.zoneId = zoneId;
this.storageId = storageId;
this.storageType = storageType;
this.sourceId = sourceId;
this.size = size;
}
public StorageInfo(long zoneId, long storageId, int storageType, Long sourceId, long size) {
this.zoneId = zoneId;
this.storageId = storageId;
this.storageType = storageType;
this.sourceId = sourceId;
this.size = size;
}
public long getZoneId() {
return zoneId;
}
public long getStorageId() {
return storageId;
}
public long getZoneId() {
return zoneId;
}
public long getStorageId() {
return storageId;
}
public int getStorageType() {
return storageType;
}
public int getStorageType() {
return storageType;
}
public long getSourceId() {
return sourceId;
}
public long getSourceId() {
return sourceId;
}
public long getSize() {
return size;
}
}
public long getSize() {
return size;
}
}
}

View File

@ -21,15 +21,15 @@ import java.util.Date;
import org.apache.log4j.Logger;
public abstract class UsageParser implements Runnable {
public static final Logger s_logger = Logger.getLogger(UsageParser.class.getName());
public void run() {
try {
parse(null);
} catch (Exception e) {
s_logger.warn("Error while parsing usage events", e);
}
}
public abstract void parse(Date endDate);
public static final Logger s_logger = Logger.getLogger(UsageParser.class.getName());
public void run() {
try {
parse(null);
} catch (Exception e) {
s_logger.warn("Error while parsing usage events", e);
}
}
public abstract void parse(Date endDate);
}

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class VMInstanceUsageParser {
public static final Logger s_logger = Logger.getLogger(VMInstanceUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVMInstanceDao m_usageInstanceDao = _locator.getDao(UsageVMInstanceDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all VMInstance usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(VMInstanceUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVMInstanceDao m_usageInstanceDao = _locator.getDao(UsageVMInstanceDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all VMInstance usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_vm_instance table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -63,7 +63,7 @@ public class VMInstanceUsageParser {
Map<String, VMInfo> vmServiceOfferingMap = new HashMap<String, VMInfo>();
// loop through all the usage instances, create a usage record for each
// loop through all the usage instances, create a usage record for each
for (UsageVMInstanceVO usageInstance : usageInstances) {
long vmId = usageInstance.getVmInstanceId();
long soId = usageInstance.getSerivceOfferingId();
@ -124,9 +124,9 @@ public class VMInstanceUsageParser {
}
return true;
}
}
private static void updateVmUsageData(Map<String, Pair<String, Long>> usageDataMap, String key, String vmName, long duration) {
private static void updateVmUsageData(Map<String, Pair<String, Long>> usageDataMap, String key, String vmName, long duration) {
Pair<String, Long> vmUsageInfo = usageDataMap.get(key);
if (vmUsageInfo == null) {
vmUsageInfo = new Pair<String, Long>(vmName, new Long(duration));
@ -136,9 +136,9 @@ public class VMInstanceUsageParser {
vmUsageInfo = new Pair<String, Long>(vmUsageInfo.first(), runningTime);
}
usageDataMap.put(key, vmUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, String vmName, long zoneId, long serviceOfferingId, long templateId, String hypervisorType) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long vmId, String vmName, long zoneId, long serviceOfferingId, long templateId, String hypervisorType) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -166,35 +166,35 @@ public class VMInstanceUsageParser {
m_usageDao.persist(usageRecord);
}
private static class VMInfo {
private long virtualMachineId;
private long zoneId;
private static class VMInfo {
private long virtualMachineId;
private long zoneId;
private long serviceOfferingId;
private long templateId;
private String hypervisorType;
private long templateId;
private String hypervisorType;
public VMInfo(long vmId, long zId, long soId, long tId, String hypervisorType) {
virtualMachineId = vmId;
zoneId = zId;
serviceOfferingId = soId;
templateId = tId;
this.hypervisorType = hypervisorType;
}
public VMInfo(long vmId, long zId, long soId, long tId, String hypervisorType) {
virtualMachineId = vmId;
zoneId = zId;
serviceOfferingId = soId;
templateId = tId;
this.hypervisorType = hypervisorType;
}
public long getZoneId() {
return zoneId;
}
public long getVirtualMachineId() {
return virtualMachineId;
}
public long getServiceOfferingId() {
return serviceOfferingId;
}
public long getTemplateId() {
return templateId;
}
private String getHypervisorType(){
return hypervisorType;
}
}
public long getZoneId() {
return zoneId;
}
public long getVirtualMachineId() {
return virtualMachineId;
}
public long getServiceOfferingId() {
return serviceOfferingId;
}
public long getTemplateId() {
return templateId;
}
private String getHypervisorType(){
return hypervisorType;
}
}
}

View File

@ -35,32 +35,32 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class VPNUserUsageParser {
public static final Logger s_logger = Logger.getLogger(VPNUserUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVPNUserDao m_usageVPNUserDao = _locator.getDao(UsageVPNUserDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all VPN user usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(VPNUserUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVPNUserDao m_usageVPNUserDao = _locator.getDao(UsageVPNUserDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all VPN user usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
List<UsageVPNUserVO> usageVUs = m_usageVPNUserDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageVUs.isEmpty()){
s_logger.debug("No VPN user usage events for this period");
return true;
s_logger.debug("No VPN user usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();
Map<String, VUInfo> vuMap = new HashMap<String, VUInfo>();
// loop through all the VPN user usage, create a usage record for each
// loop through all the VPN user usage, create a usage record for each
for (UsageVPNUserVO usageVU : usageVUs) {
long userId = usageVU.getUserId();
String userName = usageVU.getUsername();
@ -72,12 +72,12 @@ public class VPNUserUsageParser {
Date vuDeleteDate = usageVU.getDeleted();
if ((vuDeleteDate == null) || vuDeleteDate.after(endDate)) {
vuDeleteDate = endDate;
vuDeleteDate = endDate;
}
// clip the start date to the beginning of our aggregation range if the vm has been running for a while
if (vuCreateDate.before(startDate)) {
vuCreateDate = startDate;
vuCreateDate = startDate;
}
long currentDuration = (vuDeleteDate.getTime() - vuCreateDate.getTime()) + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to find total number of millis to charge)
@ -98,21 +98,21 @@ public class VPNUserUsageParser {
}
return true;
}
}
private static void updateVUUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long userId, long duration) {
private static void updateVUUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long userId, long duration) {
Pair<Long, Long> vuUsageInfo = usageDataMap.get(key);
if (vuUsageInfo == null) {
vuUsageInfo = new Pair<Long, Long>(new Long(userId), new Long(duration));
vuUsageInfo = new Pair<Long, Long>(new Long(userId), new Long(duration));
} else {
Long runningTime = vuUsageInfo.second();
runningTime = new Long(runningTime.longValue() + duration);
vuUsageInfo = new Pair<Long, Long>(vuUsageInfo.first(), runningTime);
}
usageDataMap.put(key, vuUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long userId, String userName, long zoneId) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long userId, String userName, long zoneId) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -134,26 +134,26 @@ public class VPNUserUsageParser {
new Double(usage), null, null, null, null, userId, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
private static class VUInfo {
private long userId;
private long zoneId;
private String userName;
private static class VUInfo {
private long userId;
private long zoneId;
private String userName;
public VUInfo(long userId, long zoneId, String userName) {
this.userId = userId;
this.zoneId = zoneId;
this.userName = userName;
}
public long getZoneId() {
return zoneId;
}
public long getUserId() {
return userId;
}
public String getUserName() {
return userName;
}
}
public VUInfo(long userId, long zoneId, String userName) {
this.userId = userId;
this.zoneId = zoneId;
this.userName = userName;
}
public long getZoneId() {
return zoneId;
}
public long getUserId() {
return userId;
}
public String getUserName() {
return userName;
}
}
}

View File

@ -35,19 +35,19 @@ import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;
public class VolumeUsageParser {
public static final Logger s_logger = Logger.getLogger(VolumeUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVolumeDao m_usageVolumeDao = _locator.getDao(UsageVolumeDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Volume usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
public static final Logger s_logger = Logger.getLogger(VolumeUsageParser.class.getName());
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageVolumeDao m_usageVolumeDao = _locator.getDao(UsageVolumeDao.class);
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing all Volume usage events for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
// - query usage_volume table with the following criteria:
// - look for an entry for accountId with start date in the given range
@ -57,8 +57,8 @@ public class VolumeUsageParser {
List<UsageVolumeVO> usageUsageVols = m_usageVolumeDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, 0);
if(usageUsageVols.isEmpty()){
s_logger.debug("No volume usage events for this period");
return true;
s_logger.debug("No volume usage events for this period");
return true;
}
// This map has both the running time *and* the usage amount.
@ -66,7 +66,7 @@ public class VolumeUsageParser {
Map<String, VolInfo> diskOfferingMap = new HashMap<String, VolInfo>();
// loop through all the usage volumes, create a usage record for each
// loop through all the usage volumes, create a usage record for each
for (UsageVolumeVO usageVol : usageUsageVols) {
long volId = usageVol.getId();
Long doId = usageVol.getDiskOfferingId();
@ -81,12 +81,12 @@ public class VolumeUsageParser {
Date volDeleteDate = usageVol.getDeleted();
if ((volDeleteDate == null) || volDeleteDate.after(endDate)) {
volDeleteDate = endDate;
volDeleteDate = endDate;
}
// clip the start date to the beginning of our aggregation range if the vm has been running for a while
if (volCreateDate.before(startDate)) {
volCreateDate = startDate;
volCreateDate = startDate;
}
long currentDuration = (volDeleteDate.getTime() - volCreateDate.getTime()) + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to find total number of millis to charge)
@ -107,21 +107,21 @@ public class VolumeUsageParser {
}
return true;
}
}
private static void updateVolUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long volId, long duration) {
private static void updateVolUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long volId, long duration) {
Pair<Long, Long> volUsageInfo = usageDataMap.get(key);
if (volUsageInfo == null) {
volUsageInfo = new Pair<Long, Long>(new Long(volId), new Long(duration));
volUsageInfo = new Pair<Long, Long>(new Long(volId), new Long(duration));
} else {
Long runningTime = volUsageInfo.second();
runningTime = new Long(runningTime.longValue() + duration);
volUsageInfo = new Pair<Long, Long>(volUsageInfo.first(), runningTime);
}
usageDataMap.put(key, volUsageInfo);
}
}
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long volId, long zoneId, Long doId, Long templateId, long size) {
private static void createUsageRecord(int type, long runningTime, Date startDate, Date endDate, AccountVO account, long volId, long zoneId, Long doId, Long templateId, long size) {
// Our smallest increment is hourly for now
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total running time " + runningTime + "ms");
@ -150,34 +150,34 @@ public class VolumeUsageParser {
m_usageDao.persist(usageRecord);
}
private static class VolInfo {
private long volId;
private long zoneId;
private static class VolInfo {
private long volId;
private long zoneId;
private Long diskOfferingId;
private Long templateId;
private long size;
public VolInfo(long volId, long zoneId, Long diskOfferingId, Long templateId, long size) {
this.volId = volId;
this.zoneId = zoneId;
this.diskOfferingId = diskOfferingId;
this.templateId = templateId;
this.size = size;
}
public long getZoneId() {
return zoneId;
}
public long getVolumeId() {
return volId;
}
public Long getDiskOfferingId() {
return diskOfferingId;
}
public VolInfo(long volId, long zoneId, Long diskOfferingId, Long templateId, long size) {
this.volId = volId;
this.zoneId = zoneId;
this.diskOfferingId = diskOfferingId;
this.templateId = templateId;
this.size = size;
}
public long getZoneId() {
return zoneId;
}
public long getVolumeId() {
return volId;
}
public Long getDiskOfferingId() {
return diskOfferingId;
}
public Long getTemplateId() {
return templateId;
}
}
public long getSize() {
return size;
}
}
}
}