Mirror of https://github.com/apache/cloudstack.git (synced 2025-10-26 08:42:29 +01:00)
fixing usage build in master
This commit is contained in:
parent 58841d0bc4
commit 639f25c259
@@ -1,5 +1,7 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

package com.cloud.usage;

@@ -1,5 +1,7 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

package com.cloud.usage;

@@ -1,10 +1,13 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

package com.cloud.usage;

import java.net.InetAddress;
import java.sql.SQLException;
import java.util.Calendar;
import java.util.Date;
import java.util.HashMap;

@@ -36,6 +39,7 @@ import com.cloud.usage.dao.UsagePortForwardingRuleDao;
import com.cloud.usage.dao.UsageStorageDao;
import com.cloud.usage.dao.UsageVMInstanceDao;
import com.cloud.usage.dao.UsageVolumeDao;
import com.cloud.usage.parser.IPAddressUsageParser;
import com.cloud.usage.parser.LoadBalancerUsageParser;
import com.cloud.usage.parser.NetworkOfferingUsageParser;
import com.cloud.usage.parser.NetworkUsageParser;
@@ -90,16 +94,19 @@ public class UsageManagerImpl implements UsageManager, Runnable {
private String m_version = null;
private String m_name = null;
private final Calendar m_jobExecTime = Calendar.getInstance();
private int m_aggregationDuration = 0;
private int m_sanityCheckInterval = 0;
String m_hostname = null;
int m_pid = 0;
TimeZone m_usageTimezone = TimeZone.getTimeZone("GMT");;
private final GlobalLock m_heartbeatLock = GlobalLock.getInternLock("usage.job.heartbeat.check");

private final ScheduledExecutorService m_executor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Job"));
private final ScheduledExecutorService m_heartbeatExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-HB"));
private final ScheduledExecutorService m_sanityExecutor = Executors.newSingleThreadScheduledExecutor(new NamedThreadFactory("Usage-Sanity"));
private Future m_scheduledFuture = null;
private Future m_heartbeat = null;
private Future m_sanity = null;

protected UsageManagerImpl() {
}
@@ -144,8 +151,12 @@ public class UsageManagerImpl implements UsageManager, Runnable {

String execTime = configs.get("usage.stats.job.exec.time");
String aggregationRange = configs.get("usage.stats.job.aggregation.range");
String execTimeZone = configs.get("usage.execution.timezone");
String aggreagationTimeZone = configs.get("usage.aggregation.timezone");
String sanityCheckInterval = configs.get("usage.sanity.check.interval");
if(sanityCheckInterval != null){
m_sanityCheckInterval = Integer.parseInt(sanityCheckInterval);
}

m_usageTimezone = TimeZone.getTimeZone(aggreagationTimeZone);
s_logger.debug("Usage stats aggregation time zone: "+aggreagationTimeZone);
@@ -210,7 +221,11 @@ public class UsageManagerImpl implements UsageManager, Runnable {
// use the configured exec time and aggregation duration for scheduling the job
m_scheduledFuture = m_executor.scheduleAtFixedRate(this, m_jobExecTime.getTimeInMillis() - System.currentTimeMillis(), m_aggregationDuration * 60 * 1000, TimeUnit.MILLISECONDS);

m_heartbeat = m_heartbeatExecutor.scheduleAtFixedRate(new Heartbeat(), /* start in 15 seconds...*/15*1000, /* check database every minute*/60*1000, TimeUnit.MILLISECONDS);

if(m_sanityCheckInterval > 0){
m_sanity = m_sanityExecutor.scheduleAtFixedRate(new SanityCheck(), 1, m_sanityCheckInterval, TimeUnit.DAYS);
}

Transaction usageTxn = Transaction.open(Transaction.USAGE_DB);
try {
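For readers skimming the diff: the hunk above wires the new sanity job into the same fixed-rate scheduling already used for the aggregation and heartbeat jobs. The standalone Java sketch below reproduces that pattern with plain JDK classes only; the task bodies, variable names, and configured values are illustrative stand-ins, not CloudStack code.

    import java.util.Calendar;
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class UsageSchedulingSketch {
        public static void main(String[] args) {
            ScheduledExecutorService jobExecutor = Executors.newSingleThreadScheduledExecutor();
            ScheduledExecutorService sanityExecutor = Executors.newSingleThreadScheduledExecutor();

            // Aggregation job: first run at a configured exec time, then once per aggregation period.
            Calendar jobExecTime = Calendar.getInstance();
            jobExecTime.add(Calendar.MINUTE, 1);            // stand-in for usage.stats.job.exec.time
            int aggregationMinutes = 1440;                  // stand-in for usage.stats.job.aggregation.range
            long initialDelayMs = jobExecTime.getTimeInMillis() - System.currentTimeMillis();
            jobExecutor.scheduleAtFixedRate(new Runnable() {
                public void run() {
                    System.out.println("aggregating usage records...");
                }
            }, initialDelayMs, aggregationMinutes * 60L * 1000L, TimeUnit.MILLISECONDS);

            // Sanity check: measured in days, and only scheduled when the configured interval is positive.
            int sanityCheckIntervalDays = 1;                // stand-in for usage.sanity.check.interval
            if (sanityCheckIntervalDays > 0) {
                sanityExecutor.scheduleAtFixedRate(new Runnable() {
                    public void run() {
                        System.out.println("running usage sanity check...");
                    }
                }, 1, sanityCheckIntervalDays, TimeUnit.DAYS);
            }
        }
    }

If the configured exec time is already in the past, the negative initial delay is treated as zero and the first aggregation run happens immediately.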
@@ -236,7 +251,8 @@ public class UsageManagerImpl implements UsageManager, Runnable {

public boolean stop() {
m_heartbeat.cancel(true);
m_scheduledFuture.cancel(true);
m_sanity.cancel(true);
return true;
}

@@ -710,6 +726,13 @@ public class UsageManagerImpl implements UsageManager, Runnable {
if (!parsed) {
s_logger.debug("network offering usage successfully parsed? " + parsed + " (for account: " + account.getAccountName() + ", id: " + account.getId() + ")");
}
}

parsed = IPAddressUsageParser.parse(account, currentStartDate, currentEndDate);
if (s_logger.isDebugEnabled()) {
if (!parsed) {
s_logger.debug("IPAddress usage successfully parsed? " + parsed + " (for account: " + account.getAccountName() + ", id: " + account.getId() + ")");
}
}

return parsed;
@@ -818,7 +841,16 @@ public class UsageManagerImpl implements UsageManager, Runnable {
usageInstance.setEndDate(event.getCreateDate());
m_usageInstanceDao.update(usageInstance);
}
}
}
}

sc = m_usageInstanceDao.createSearchCriteria();
sc.addAnd("vmInstanceId", SearchCriteria.Op.EQ, Long.valueOf(vmId));
sc.addAnd("endDate", SearchCriteria.Op.NULL);
sc.addAnd("usageType", SearchCriteria.Op.EQ, UsageTypes.ALLOCATED_VM);
usageInstances = m_usageInstanceDao.search(sc, null);
if (usageInstances == null || (usageInstances.size() == 0)) {
s_logger.error("Cannot find allocated vm entry for a vm running with id: " + vmId);
}

Long templateId = event.getTemplateId();
@@ -989,7 +1021,23 @@ public class UsageManagerImpl implements UsageManager, Runnable {
size = event.getSize();
}

if (EventTypes.EVENT_VOLUME_CREATE.equals(event.getType())) {
SearchCriteria<UsageVolumeVO> sc = m_usageVolumeDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, volId);
sc.addAnd("deleted", SearchCriteria.Op.NULL);
List<UsageVolumeVO> volumesVOs = m_usageVolumeDao.search(sc, null);
if (volumesVOs.size() > 0) {
// This is a safeguard to avoid double counting of volumes.
s_logger.error("Found duplicate usage entry for volume: " + volId + " assigned to account: " + event.getAccountId() + "; marking as deleted...");
}
for (UsageVolumeVO volumesVO : volumesVOs) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("deleting volume: " + volumesVO.getId() + " from account: " + volumesVO.getAccountId());
}
volumesVO.setDeleted(event.getCreateDate());
m_usageVolumeDao.update(volumesVO);
}
if (s_logger.isDebugEnabled()) {
s_logger.debug("create volume with id : " + volId + " for account: " + event.getAccountId());
}
@@ -997,7 +1045,6 @@ public class UsageManagerImpl implements UsageManager, Runnable {
UsageVolumeVO volumeVO = new UsageVolumeVO(volId, zoneId, event.getAccountId(), acct.getDomainId(), doId, templateId, size, event.getCreateDate(), null);
m_usageVolumeDao.persist(volumeVO);
} else if (EventTypes.EVENT_VOLUME_DELETE.equals(event.getType())) {
// at this point it's not a sourceNat IP, so find the usage record with this IP and a null released date, update the released date
SearchCriteria<UsageVolumeVO> sc = m_usageVolumeDao.createSearchCriteria();
sc.addAnd("accountId", SearchCriteria.Op.EQ, event.getAccountId());
sc.addAnd("id", SearchCriteria.Op.EQ, volId);
@@ -1344,5 +1391,21 @@ public class UsageManagerImpl implements UsageManager, Runnable {
sc.addAnd("scheduled", SearchCriteria.Op.EQ, Integer.valueOf(0));
m_usageJobDao.expunge(sc);
}
}

private class SanityCheck implements Runnable {
public void run() {
UsageSanityChecker usc = new UsageSanityChecker();
try {
String errors = usc.runSanityCheck();
if(errors.length() > 0){
_alertMgr.sendAlert(AlertManager.ALERT_TYPE_USAGE_SANITY_RESULT, 0, new Long(0), "Usage Sanity Check failed", errors);
} else {
_alertMgr.clearAlert(AlertManager.ALERT_TYPE_USAGE_SANITY_RESULT, 0, 0);
}
} catch (SQLException e) {
s_logger.error("Error in sanity check", e);
}
}
}
}

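The SanityCheck runnable added above follows a raise-or-clear pattern: a non-empty error string raises an alert, an empty one clears any alert raised by a previous run, and exceptions are caught so the scheduled task keeps running. Below is a minimal self-contained sketch of that pattern; Checker and AlertSink are hypothetical stand-ins, not the actual UsageSanityChecker or AlertManager APIs.

    import java.sql.SQLException;

    public class SanityCheckSketch {
        /** Hypothetical stand-in for a checker such as UsageSanityChecker. */
        interface Checker {
            String runSanityCheck() throws SQLException;
        }

        /** Hypothetical stand-in for an alert facility such as AlertManager. */
        interface AlertSink {
            void sendAlert(String subject, String body);
            void clearAlert(String subject);
        }

        static Runnable sanityTask(final Checker checker, final AlertSink alerts) {
            return new Runnable() {
                public void run() {
                    try {
                        String errors = checker.runSanityCheck();
                        if (errors.length() > 0) {
                            // Non-empty result: raise (or refresh) the alert with the details.
                            alerts.sendAlert("Usage Sanity Check failed", errors);
                        } else {
                            // Clean run: clear any previously raised alert.
                            alerts.clearAlert("Usage Sanity Check failed");
                        }
                    } catch (SQLException e) {
                        // Log-and-continue, as in the diff above; rethrowing would stop the periodic task.
                        e.printStackTrace();
                    }
                }
            };
        }
    }

Catching the exception inside run() matters for any task handed to scheduleAtFixedRate: a task whose run() throws is not rescheduled.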
@@ -1,20 +1,5 @@
/**
* Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
* This software is licensed under the GNU General Public License v3 or later.
*
* It is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or any later version.
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.

* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

@@ -1,5 +1,7 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

package com.cloud.usage;

@@ -1,12 +1,16 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*
*/

package com.cloud.usage.parser;

import java.util.Calendar;
import java.text.DecimalFormat;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.log4j.Logger;

@@ -17,108 +21,145 @@ import com.cloud.usage.UsageVO;
import com.cloud.usage.dao.UsageDao;
import com.cloud.usage.dao.UsageIPAddressDao;
import com.cloud.user.AccountVO;
import com.cloud.utils.Pair;
import com.cloud.utils.component.ComponentLocator;

public class IPAddressUsageParser {
public static final Logger s_logger = Logger.getLogger(IPAddressUsageParser.class.getName());

private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageIPAddressDao m_usageIPAddressDao = _locator.getDao(UsageIPAddressDao.class);

// FIXME: IP Address stuff will be in the helper table and not really rolled up to usage table since it doesn't make sense to have it that way
public static boolean parse(AccountVO account, Date startDate, Date endDate) {
s_logger.info("Parsing all ip address usage events");
public static final Logger s_logger = Logger.getLogger(IPAddressUsageParser.class.getName());

// FIXME: endDate should be 23:59:59 of the day in question if it's not after the current date (or null)
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}
private static ComponentLocator _locator = ComponentLocator.getLocator(UsageServer.Name, "usage-components.xml", "log4j-cloud_usage");
private static UsageDao m_usageDao = _locator.getDao(UsageDao.class);
private static UsageIPAddressDao m_usageIPAddressDao = _locator.getDao(UsageIPAddressDao.class);


public static boolean parse(AccountVO account, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Parsing IP Address usage for account: " + account.getId());
}
if ((endDate == null) || endDate.after(new Date())) {
endDate = new Date();
}

// - query usage_ip_address table with the following criteria:
// - look for an entry for accountId with start date in the given range
// - look for an entry for accountId with end date in the given range
// - look for an entry for accountId with end date null (currently running vm or owned IP)
// - look for an entry for accountId with start date before given range *and* end date after given range
List<UsageIPAddressVO> usageIPAddress = m_usageIPAddressDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate);

if(usageIPAddress.isEmpty()){
s_logger.debug("No IP Address usage for this period");
return true;
}

// This map has both the running time *and* the usage amount.
Map<String, Pair<Long, Long>> usageMap = new HashMap<String, Pair<Long, Long>>();

Map<String, IpInfo> IPMap = new HashMap<String, IpInfo>();

// loop through all the usage IPs, create a usage record for each
for (UsageIPAddressVO usageIp : usageIPAddress) {
long IpId = usageIp.getId();

String key = ""+IpId;

// store the info in the IP map
IPMap.put(key, new IpInfo(usageIp.getZoneId(), IpId, usageIp.getAddress(), usageIp.isSourceNat()));

Date IpAssignDate = usageIp.getAssigned();
Date IpReleaseDeleteDate = usageIp.getReleased();

if ((IpReleaseDeleteDate == null) || IpReleaseDeleteDate.after(endDate)) {
IpReleaseDeleteDate = endDate;
}

// clip the start date to the beginning of our aggregation range if the vm has been running for a while
if (IpAssignDate.before(startDate)) {
IpAssignDate = startDate;
}

long currentDuration = (IpReleaseDeleteDate.getTime() - IpAssignDate.getTime()) + 1; // make sure this is an inclusive check for milliseconds (i.e. use n - m + 1 to find total number of millis to charge)

updateIpUsageData(usageMap, key, usageIp.getId(), currentDuration);
}

for (String ipIdKey : usageMap.keySet()) {
Pair<Long, Long> ipTimeInfo = usageMap.get(ipIdKey);
long useTime = ipTimeInfo.second().longValue();

// Only create a usage record if we have a runningTime of bigger than zero.
if (useTime > 0L) {
IpInfo info = IPMap.get(ipIdKey);
createUsageRecord(info.getZoneId(), useTime, startDate, endDate, account, info.getIpId(), info.getIPAddress(), info.isSourceNat());
}
}

return true;
}

private static void updateIpUsageData(Map<String, Pair<Long, Long>> usageDataMap, String key, long ipId, long duration) {
Pair<Long, Long> ipUsageInfo = usageDataMap.get(key);
if (ipUsageInfo == null) {
ipUsageInfo = new Pair<Long, Long>(new Long(ipId), new Long(duration));
} else {
Long runningTime = ipUsageInfo.second();
runningTime = new Long(runningTime.longValue() + duration);
ipUsageInfo = new Pair<Long, Long>(ipUsageInfo.first(), runningTime);
}
usageDataMap.put(key, ipUsageInfo);
}

private static void createUsageRecord(long zoneId, long runningTime, Date startDate, Date endDate, AccountVO account, long IpId, String IPAddress, boolean isSourceNat) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Total usage time " + runningTime + "ms");
}

float usage = runningTime / 1000f / 60f / 60f;

DecimalFormat dFormat = new DecimalFormat("#.######");
String usageDisplay = dFormat.format(usage);

if (s_logger.isDebugEnabled()) {
s_logger.debug("Creating IP usage record with id: " + IpId + ", usage: " + usageDisplay + ", startDate: " + startDate + ", endDate: " + endDate + ", for account: " + account.getId());
}

String usageDesc = "IPAddress: "+IPAddress;

// Create the usage record

UsageVO usageRecord = new UsageVO(zoneId, account.getAccountId(), account.getDomainId(), usageDesc, usageDisplay + " Hrs",
UsageTypes.IP_ADDRESS, new Double(usage), null, null, null, null, IpId, startDate, endDate, (isSourceNat?"SourceNat":""));
m_usageDao.persist(usageRecord);
}

private static class IpInfo {
private long zoneId;
private long IpId;
private String IPAddress;
private boolean isSourceNat;

public IpInfo(long zoneId,long IpId, String IPAddress, boolean isSourceNat) {
this.zoneId = zoneId;
this.IpId = IpId;
this.IPAddress = IPAddress;
this.isSourceNat = isSourceNat;
}

public long getZoneId() {
return zoneId;
}

public long getIpId() {
return IpId;
}

public String getIPAddress() {
return IPAddress;
}

public boolean isSourceNat() {
return isSourceNat;
}
}

List<UsageIPAddressVO> usageInstances = m_usageIPAddressDao.getUsageRecords(account.getId(), account.getDomainId(), startDate, endDate, false, null, null);

// IP Addresses are billed monthly. In the given date range, figure out how many months occur and create a usage record
// for each month
// FIXME: as part of this usage record, we might want to say startTime/endTime during the month that the IP was allocated
Calendar startCal = Calendar.getInstance();
startCal.setTime(startDate);
startCal.set(Calendar.DAY_OF_MONTH, 1);
startCal.set(Calendar.HOUR_OF_DAY, 0);
startCal.set(Calendar.MINUTE, 0);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);

// set the end date to be the last day of the month
Calendar endCal = Calendar.getInstance();
endCal.setTime(endDate);
endCal.set(Calendar.DAY_OF_MONTH, endCal.getActualMaximum(Calendar.DAY_OF_MONTH));

int numberOfMonths = 0;
while (startCal.before(endCal)) {
numberOfMonths++;
startCal.roll(Calendar.MONTH, true);
}

if (s_logger.isDebugEnabled()) {
s_logger.debug("processing " + numberOfMonths + " month(s) worth of ip address data");
}

for (UsageIPAddressVO usageInstance : usageInstances) {
String ipAddress = usageInstance.getAddress();
Date assignedDate = usageInstance.getAssigned();
Date releasedDate = usageInstance.getReleased();

// if the IP address is currently owned, bill for up to the current date
if (releasedDate == null) {
releasedDate = new Date();
}

// reset startCal
startCal.setTime(startDate);
startCal.set(Calendar.DAY_OF_MONTH, 1);
startCal.set(Calendar.HOUR_OF_DAY, 0);
startCal.set(Calendar.MINUTE, 0);
startCal.set(Calendar.SECOND, 0);
startCal.set(Calendar.MILLISECOND, 0);

// TODO: this really needs to be tested well, and might be over-engineered for what we really need, but the
// point is to count each month in which the IP address is owned and bill for that month
// we know the number of months, create a usage record for each month
// FIXME: this is supposed to create a usage record per month...first of all, that's super confusing and we need
// to get out of the weekly/monthly/daily business and instead we need to say for a given range whether or
// not the IP address was use. It's up to our customers to (a) give sensible date ranges for their own
// usage purposes and (b)
for (int i = 0; i < numberOfMonths; i++) {
if (assignedDate.before(startCal.getTime())) {
assignedDate = startCal.getTime();
}
startCal.roll(Calendar.MONTH, true);
Date nextMonth = startCal.getTime();
startCal.add(Calendar.MILLISECOND, -1);
if (releasedDate.before(startCal.getTime())) {
startCal.setTime(releasedDate);
}
createUsageRecord(assignedDate, startCal.getTime(), account, ipAddress, startDate, endDate);

// go to the start of the next month for the next iteration
startCal.setTime(nextMonth);
}
}

return true;
}

// TODO: ip address usage comes from the usage_ip_address table, not cloud_usage table, so this is largely irrelevant and might be going away
private static void createUsageRecord(Date assigned, Date ownedUntil, AccountVO account, String address, Date startDate, Date endDate) {
if (s_logger.isDebugEnabled()) {
s_logger.debug("Creating usage record for account: " + account.getId() + ", ip: " + address + ", assigned date: " + assigned + ", owned until: " + ownedUntil);
}

// Create the usage record
String usageDesc = "usage for ip address '" + address +
"' (assigned on " + assigned + ", owned until " + ownedUntil + ")";
UsageVO usageRecord = new UsageVO(Long.valueOf(0), account.getId(), account.getDomainId(), usageDesc, "1 Month", UsageTypes.IP_ADDRESS, Double.valueOf(1),
null, null, null, null, null, null, startDate, endDate);
m_usageDao.persist(usageRecord);
}
}

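The record creation in the rewritten parser above reduces to an inclusive millisecond duration converted to hours and formatted with a "#.######" pattern. A small worked example of that arithmetic, using made-up timestamps rather than real usage data:

    import java.text.DecimalFormat;

    public class IpUsageHoursExample {
        public static void main(String[] args) {
            // Illustrative values: IP assigned at t = 0 and released 90 minutes later.
            long assignedMs = 0L;
            long releasedMs = 90L * 60L * 1000L;

            // Inclusive duration, mirroring the parser's (release - assign) + 1 millisecond accounting.
            long durationMs = (releasedMs - assignedMs) + 1;

            // Milliseconds -> hours, then format the way the parser builds its "Hrs" display string.
            float usageHours = durationMs / 1000f / 60f / 60f;
            DecimalFormat dFormat = new DecimalFormat("#.######");

            System.out.println(dFormat.format(usageHours) + " Hrs"); // prints "1.5 Hrs"
        }
    }

At six decimal places the extra inclusive millisecond disappears, so a 90-minute assignment is billed as exactly 1.5 hours.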
@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;

@@ -1,5 +1,6 @@
/**
* Copyright (C) 2011 Cloud.com, Inc. All rights reserved.
* * Copyright (C) 2011 Citrix Systems, Inc. All rights reserved
*
*/

package com.cloud.usage.parser;
