Mirror of https://github.com/apache/cloudstack.git

Merge branch '4.20'
commit b57994eeb0
@@ -37,4 +37,6 @@ public interface UsageJobDao extends GenericDao<UsageJobVO, Long> {
     UsageJobVO isOwner(String hostname, int pid);
 
     void updateJobSuccess(Long jobId, long startMillis, long endMillis, long execTime, boolean success);
+
+    void removeLastOpenJobsOwned(String hostname, int pid);
 }
@@ -22,6 +22,7 @@ import java.util.Date;
 import java.util.List;
 
 
+import org.apache.commons.collections.CollectionUtils;
 import org.springframework.stereotype.Component;
 
 import com.cloud.usage.UsageJobVO;
@@ -114,7 +115,7 @@ public class UsageJobDaoImpl extends GenericDaoBase<UsageJobVO, Long> implements
     public UsageJobVO isOwner(String hostname, int pid) {
         TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
         try {
-            if ((hostname == null) || (pid <= 0)) {
+            if (hostname == null || pid <= 0) {
                 return null;
             }
 
@@ -174,7 +175,7 @@ public class UsageJobDaoImpl extends GenericDaoBase<UsageJobVO, Long> implements
         SearchCriteria<UsageJobVO> sc = createSearchCriteria();
         sc.addAnd("endMillis", SearchCriteria.Op.EQ, Long.valueOf(0));
         sc.addAnd("jobType", SearchCriteria.Op.EQ, Integer.valueOf(UsageJobVO.JOB_TYPE_SINGLE));
-        sc.addAnd("scheduled", SearchCriteria.Op.EQ, Integer.valueOf(0));
+        sc.addAnd("scheduled", SearchCriteria.Op.EQ, Integer.valueOf(UsageJobVO.JOB_NOT_SCHEDULED));
         List<UsageJobVO> jobs = search(sc, filter);
 
         if ((jobs == null) || jobs.isEmpty()) {
@@ -194,4 +195,36 @@ public class UsageJobDaoImpl extends GenericDaoBase<UsageJobVO, Long> implements
         }
         return jobs.get(0).getHeartbeat();
     }
+
+    private List<UsageJobVO> getLastOpenJobsOwned(String hostname, int pid) {
+        SearchCriteria<UsageJobVO> sc = createSearchCriteria();
+        sc.addAnd("endMillis", SearchCriteria.Op.EQ, Long.valueOf(0));
+        sc.addAnd("host", SearchCriteria.Op.EQ, hostname);
+        if (pid > 0) {
+            sc.addAnd("pid", SearchCriteria.Op.EQ, Integer.valueOf(pid));
+        }
+        return listBy(sc);
+    }
+
+    @Override
+    public void removeLastOpenJobsOwned(String hostname, int pid) {
+        if (hostname == null) {
+            return;
+        }
+
+        TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
+        try {
+            List<UsageJobVO> jobs = getLastOpenJobsOwned(hostname, pid);
+            if (CollectionUtils.isNotEmpty(jobs)) {
+                logger.info("Found {} open jobs to remove", jobs.size());
+                for (UsageJobVO job : jobs) {
+                    logger.debug("Removing job - id: {}, pid: {}, job type: {}, scheduled: {}, heartbeat: {}",
+                            job.getId(), job.getPid(), job.getJobType(), job.getScheduled(), job.getHeartbeat());
+                    remove(job.getId());
+                }
+            }
+        } finally {
+            txn.close();
+        }
+    }
 }
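A note on the pid argument, inferred from the guard in getLastOpenJobsOwned() above: a non-positive pid skips the pid filter, so every open job recorded for the host is removed, while a positive pid limits removal to jobs written by that exact process. The UsageManagerImpl hunks further down call the method in exactly these two ways. A minimal sketch of the two call patterns follows; the wrapper class, method names and sample values are illustrative, and the DAO package is assumed to be com.cloud.usage.dao.

import com.cloud.usage.dao.UsageJobDao;

// Sketch only; not part of the commit.
public class OpenJobCleanupSketch {
    private final UsageJobDao usageJobDao;
    private final String hostname;

    public OpenJobCleanupSketch(UsageJobDao usageJobDao, String hostname) {
        this.usageJobDao = usageJobDao;
        this.hostname = hostname;
    }

    // On start-up: pid 0 fails the (pid > 0) check, so the pid filter is skipped
    // and all open jobs recorded for this host are removed.
    public void cleanupOnStart() {
        usageJobDao.removeLastOpenJobsOwned(hostname, 0);
    }

    // On shutdown: a positive pid removes only the open jobs written by this process.
    public void cleanupOnShutdown(int pid) {
        usageJobDao.removeLastOpenJobsOwned(hostname, pid);
    }
}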
@@ -653,24 +653,10 @@ public class ScaleIOStorageAdaptor implements StorageAdaptor {
             if (!ScaleIOUtil.startSDCService()) {
                 return new Ternary<>(false, null, "Couldn't start SDC service on host");
             }
-        }
-
-        if (MapUtils.isNotEmpty(details) && details.containsKey(ScaleIOGatewayClient.STORAGE_POOL_MDMS)) {
-            // Assuming SDC service is started, add mdms
-            String mdms = details.get(ScaleIOGatewayClient.STORAGE_POOL_MDMS);
-            String[] mdmAddresses = mdms.split(",");
-            if (mdmAddresses.length > 0) {
-                if (ScaleIOUtil.isMdmPresent(mdmAddresses[0])) {
-                    return new Ternary<>(true, getSDCDetails(details), "MDM added, no need to prepare the SDC client");
-                }
-
-                ScaleIOUtil.addMdms(mdmAddresses);
-                if (!ScaleIOUtil.isMdmPresent(mdmAddresses[0])) {
-                    return new Ternary<>(false, null, "Failed to add MDMs");
-                } else {
-                    logger.debug(String.format("MDMs %s added to storage pool %s", mdms, uuid));
-                    applyMdmsChangeWaitTime(details);
-                }
-            }
+        } else {
+            logger.debug("SDC service is active on host, re-starting it");
+            if (!ScaleIOUtil.restartSDCService()) {
+                return new Ternary<>(false, null, "Couldn't restart SDC service on host");
+            }
         }
 
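The net effect of this hunk: the MDM bookkeeping is dropped, and prepareStorageClient() now restarts an already-active SDC service instead of leaving it untouched. Below is a self-contained sketch of the resulting decision, reusing the error strings from the diff; the boolean parameters stand in for the systemctl probes the adaptor performs (visible as stubs in the tests further down), and the class itself is illustrative, not CloudStack code.

public class SdcServicePrepSketch {
    static String prepare(boolean sdcServiceActive, boolean startOk, boolean restartOk) {
        if (!sdcServiceActive) {
            // Service not running: try to start it.
            return startOk ? "started" : "Couldn't start SDC service on host";
        }
        // Service already running: the new code restarts it rather than leaving it as-is.
        return restartOk ? "restarted" : "Couldn't restart SDC service on host";
    }

    public static void main(String[] args) {
        System.out.println(prepare(true, true, false)); // Couldn't restart SDC service on host
    }
}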
@@ -798,13 +784,13 @@ public class ScaleIOStorageAdaptor implements StorageAdaptor {
         if (sdcId != null) {
             sdcDetails.put(ScaleIOGatewayClient.SDC_ID, sdcId);
             return sdcDetails;
-        }
-
+        } else {
             String sdcGuId = ScaleIOUtil.getSdcGuid();
             if (sdcGuId != null) {
                 sdcDetails.put(ScaleIOGatewayClient.SDC_GUID, sdcGuId);
                 return sdcDetails;
             }
+        }
 
         try {
             Thread.sleep(timeBetweenTries);
@@ -92,6 +92,34 @@ public class ScaleIOStorageAdaptorTest {
         Assert.assertEquals("SDC service not enabled on host", result.third());
     }
 
+    @Test
+    public void testPrepareStorageClient_SDCServiceNotRestarted() {
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl status scini"))).thenReturn(3);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl is-enabled scini"))).thenReturn(0);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl is-active scini"))).thenReturn(0);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl restart scini"))).thenReturn(1);
+
+        Ternary<Boolean, Map<String, String>, String> result = scaleIOStorageAdaptor.prepareStorageClient(poolUuid, new HashMap<>());
+
+        Assert.assertFalse(result.first());
+        Assert.assertNull(result.second());
+        Assert.assertEquals("Couldn't restart SDC service on host", result.third());
+    }
+
+    @Test
+    public void testPrepareStorageClient_SDCServiceRestarted() {
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl status scini"))).thenReturn(3);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl is-enabled scini"))).thenReturn(0);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl is-active scini"))).thenReturn(0);
+        when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl restart scini"))).thenReturn(0);
+
+        Ternary<Boolean, Map<String, String>, String> result = scaleIOStorageAdaptor.prepareStorageClient(poolUuid, new HashMap<>());
+
+        Assert.assertFalse(result.first());
+        Assert.assertNull(result.second());
+        Assert.assertEquals("Couldn't get the SDC details on the host", result.third());
+    }
+
     @Test
     public void testPrepareStorageClient_SDCServiceNotStarted() {
         when(Script.runSimpleBashScriptForExitValue(Mockito.eq("systemctl status scini"))).thenReturn(3);
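Reading the stubs above: the "systemctl status scini" and "is-enabled scini" return values presumably satisfy the adaptor's earlier checks, "is-active scini" returning 0 makes the service look active so the new restart branch is taken, and the "systemctl restart scini" exit code picks the outcome: 1 drives the "Couldn't restart SDC service on host" assertion, while 0 lets preparation continue until it fails for lack of SDC details. The hunk does not show how the test class arranges static mocking of Script; the sketch below shows one standard way to stub a static call per command string with Mockito's MockedStatic, using a stand-in class rather than CloudStack's Script.

import org.mockito.MockedStatic;
import org.mockito.Mockito;

// Stand-in for the static shell helper; not CloudStack's Script class.
class ShellProbe {
    static int runForExitValue(String command) {
        throw new UnsupportedOperationException("would shell out in a real run");
    }
}

public class StaticStubSketch {
    public static void main(String[] args) {
        // Assumes mockito-inline (or Mockito 5+) on the classpath for static mocking.
        try (MockedStatic<ShellProbe> probe = Mockito.mockStatic(ShellProbe.class)) {
            probe.when(() -> ShellProbe.runForExitValue("systemctl restart scini")).thenReturn(1);
            // Prints 1: the stub simulates a failed restart without touching systemctl.
            System.out.println(ShellProbe.runForExitValue("systemctl restart scini"));
        }
    }
}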
pom.xml
@@ -50,7 +50,7 @@
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
     <project.systemvm.template.location>https://download.cloudstack.org/systemvm</project.systemvm.template.location>
-    <project.systemvm.template.version>4.20.1.0</project.systemvm.template.version>
+    <project.systemvm.template.version>4.20.2.0</project.systemvm.template.version>
     <sonar.organization>apache</sonar.organization>
     <sonar.host.url>https://sonarcloud.io</sonar.host.url>
 
@@ -68,6 +68,7 @@ function zero_disk() {
 }
 
 function finalize() {
+  depmod -a
   configure_misc
   configure_rundisk_size
   configure_sudoers
@@ -312,6 +312,9 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
             logger.info("Starting Usage Manager");
         }
 
+        _usageJobDao.removeLastOpenJobsOwned(_hostname, 0);
+        Runtime.getRuntime().addShutdownHook(new AbandonJob());
+
         // use the configured exec time and aggregation duration for scheduling the job
         _scheduledFuture =
             _executor.scheduleAtFixedRate(this, _jobExecTime.getTimeInMillis() - System.currentTimeMillis(), _aggregationDuration * 60 * 1000, TimeUnit.MILLISECONDS);
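The two added lines wire up cleanup in both directions: open job rows left behind by a previous run on this host are purged as soon as the manager starts, and the AbandonJob shutdown hook is registered before the scheduling calls rather than after them (the next hunk removes the old registration site). A minimal, self-contained sketch of the same start-up pattern follows; the class, hostname and pid are illustrative, not CloudStack's.

public class StartupCleanupSketch {
    public static void main(String[] args) {
        // 1. Remove anything a previous run on this host left open.
        removeStaleOpenJobs("usage-host-1");

        // 2. Register the shutdown hook early, presumably so it is already in place
        //    if a later start-up step (scheduling, DB access) fails.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> removeOwnOpenJobs("usage-host-1", 12345)));

        System.out.println("usage manager started");
    }

    static void removeStaleOpenJobs(String hostname) {
        System.out.println("removed stale open jobs for " + hostname);
    }

    static void removeOwnOpenJobs(String hostname, int pid) {
        System.out.println("removed open jobs for " + hostname + " pid " + pid);
    }
}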
@@ -324,7 +327,6 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
             _sanity = _sanityExecutor.scheduleAtFixedRate(new SanityCheck(), 1, _sanityCheckInterval, TimeUnit.DAYS);
         }
 
-        Runtime.getRuntime().addShutdownHook(new AbandonJob());
         TransactionLegacy usageTxn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
         try {
             if (_heartbeatLock.lock(3)) { // 3 second timeout
@@ -2148,19 +2150,17 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
         // the aggregation range away from executing the next job
         long now = System.currentTimeMillis();
         long timeToJob = _jobExecTime.getTimeInMillis() - now;
-        long timeSinceJob = 0;
+        long timeSinceLastSuccessJob = 0;
         long aggregationDurationMillis = _aggregationDuration * 60L * 1000L;
         long lastSuccess = _usageJobDao.getLastJobSuccessDateMillis();
         if (lastSuccess > 0) {
-            timeSinceJob = now - lastSuccess;
+            timeSinceLastSuccessJob = now - lastSuccess;
         }
 
-        if ((timeSinceJob > 0) && (timeSinceJob > (aggregationDurationMillis - 100))) {
+        if ((timeSinceLastSuccessJob > 0) && (timeSinceLastSuccessJob > (aggregationDurationMillis - 100))) {
             if (timeToJob > (aggregationDurationMillis / 2)) {
-                if (logger.isDebugEnabled()) {
-                    logger.debug("it's been " + timeSinceJob + " ms since last usage job and " + timeToJob +
-                        " ms until next job, scheduling an immediate job to catch up (aggregation duration is " + _aggregationDuration + " minutes)");
-                }
+                logger.debug("it's been {} ms since last usage job and {} ms until next job, scheduling an immediate job to catch up (aggregation duration is {} minutes)"
+                        , timeSinceLastSuccessJob, timeToJob, _aggregationDuration);
                 scheduleParse();
             }
         }
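To make the renamed condition concrete, the sketch below plugs in illustrative numbers: a 60-minute aggregation interval, a last successful job 2 hours ago, and a next scheduled run 45 minutes away. Only the comparisons are taken from the hunk; the values and the class are made up.

public class CatchUpCheckSketch {
    public static void main(String[] args) {
        long aggregationDurationMinutes = 60;                                      // illustrative
        long aggregationDurationMillis = aggregationDurationMinutes * 60L * 1000L; // 3,600,000 ms
        long timeSinceLastSuccessJob = 2L * 60 * 60 * 1000;                        // last success 2 hours ago
        long timeToJob = 45L * 60 * 1000;                                          // next run in 45 minutes

        // Same checks as the hunk: more than one aggregation period (minus 100 ms of
        // slack) has passed since the last success, and the next run is more than half
        // a period away, so an immediate catch-up job would be scheduled.
        boolean overdue = (timeSinceLastSuccessJob > 0)
                && (timeSinceLastSuccessJob > (aggregationDurationMillis - 100));
        boolean nextRunFarEnough = timeToJob > (aggregationDurationMillis / 2);

        System.out.println(overdue && nextRunFarEnough); // prints true
    }
}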
@@ -2245,17 +2245,12 @@ public class UsageManagerImpl extends ManagerBase implements UsageManager, Runna
             }
         }
     }
 
     private class AbandonJob extends Thread {
         @Override
         public void run() {
-            logger.info("exitting Usage Manager");
-            deleteOpenjob();
-        }
-
-        private void deleteOpenjob() {
-            UsageJobVO job = _usageJobDao.isOwner(_hostname, _pid);
-            if (job != null) {
-                _usageJobDao.remove(job.getId());
-            }
+            logger.info("exiting Usage Manager");
+            _usageJobDao.removeLastOpenJobsOwned(_hostname, _pid);
         }
     }
 }