Mirror of https://github.com/apache/cloudstack.git (synced 2025-12-16 10:32:34 +01:00)

Compare commits

71 Commits
| Author | SHA1 | Date |
|---|---|---|
| | da1c7cebf9 | |
| | 39d0d62fdd | |
| | f570e16836 | |
| | 1919dcfb7c | |
| | f417c6b0a1 | |
| | 78f9e6584b | |
| | cfe96026dc | |
| | 3c6484792d | |
| | 51910cd260 | |
| | 5151f8dc6a | |
| | c81295439f | |
| | b0d74fe00c | |
| | a0ba2aaf3f | |
| | 4379666fb6 | |
| | e4414d1c44 | |
| | 26009659f9 | |
| | 2941b518ba | |
| | f3a112fd9e | |
| | 243f566a60 | |
| | 516012a0b4 | |
| | 44119cf34f | |
| | db6147060b | |
| | f379d78963 | |
| | 5798fb43a3 | |
| | 4e61ddd1bc | |
| | 9032fe3fb5 | |
| | e23c7ef701 | |
| | e33f4754f5 | |
| | 9ec8cc4186 | |
| | 8171d9568c | |
| | dba889ea3e | |
| | 6dc259c7da | |
| | 39126a4339 | |
| | aa18188d30 | |
| | 4ed86a2627 | |
| | 86ae1fee7f | |
| | 21d844ba1c | |
| | ac3b18095a | |
| | fff4cafdca | |
| | a5b455ff3a | |
| | 8b034dc439 | |
| | 028dd86945 | |
| | dc8f465527 | |
| | e90e31d386 | |
| | f985a67f4d | |
| | 5f9e131198 | |
| | f0a0936675 | |
| | 671d8ad704 | |
| | 81787b310e | |
| | 23fb0e2ccb | |
| | 40c8bc528d | |
| | 15439ede7d | |
| | 50fe265017 | |
| | d26122bf22 | |
| | 2dd1e6d786 | |
| | 8c86f24261 | |
| | 2954e96947 | |
| | c5c3cc40c1 | |
| | 9c0efb7072 | |
| | b8ec941ec1 | |
| | 8230f04a79 | |
| | a50de029bf | |
| | 81b2c38be9 | |
| | ac8c200790 | |
| | 5504b053e4 | |
| | dbda673e1f | |
| | e66926e6a4 | |
| | d160731b9f | |
| | 15c2e50338 | |
| | d53b6dbda4 | |
| | e90e436ef8 | |
.github/CODEOWNERS (vendored, new file, 22 lines)

@@ -0,0 +1,22 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

/plugins/storage/volume/linstor @rp-
/plugins/storage/volume/storpool @slavkap

.pre-commit-config.yaml @jbampton
/.github/linters/ @jbampton
.github/linters/.yamllint.yml (vendored, 3 changes)

@@ -15,13 +15,14 @@
# specific language governing permissions and limitations
# under the License.
---
extends: relaxed
extends: default

rules:
  line-length:
    max: 400 # Very forgiving for GitHub Actions and infrastructure files
  indentation: disable # Disable indentation checking for existing files
  comments: disable # Disable comment formatting checks
  braces: disable
  brackets: disable # Disable bracket spacing checks
  colons:
    max-spaces-after: -1 # Allow any number of spaces after colon
.github/linters/codespell.txt (vendored, 47 changes)

@@ -4,6 +4,7 @@ acount
actuall
acuiring
acumulate
addin
addreess
addtion
adminstrator
@@ -12,10 +13,8 @@ afrer
afterall
againt
ags
aktive
algoritm
allo
alloacate
allocted
alocation
alogrithm
@@ -65,6 +64,7 @@ bject
boardcast
bootstraper
bu
callin
cant
capabilites
capablity
@@ -73,6 +73,7 @@ carrefully
cavaet
chaing
checkd
checkin
childs
choosen
chould
@@ -93,7 +94,6 @@ confg
configruation
configuable
conneciton
connexion
constrait
constraits
containg
@@ -101,9 +101,7 @@ contex
continuesly
contro
controler
controles
controll
convienient
convinience
coputer
correcponding
@@ -158,13 +156,13 @@ differnet
differnt
direcotry
directroy
disale
disbale
discrepency
disover
dissapper
dissassociated
divice
dockin
doesn'
doesnot
doesnt
@@ -175,7 +173,6 @@ eanbled
earch
ect
elemnt
eles
elments
emmited
enble
@@ -187,22 +184,19 @@ environmnet
equivalant
erro
erronous
everthing
everytime
excute
execept
execption
exects
execut
executeable
exeeded
exisitng
exisits
existin
existsing
exitting
expcted
expection
explaination
explicitely
faield
faild
@@ -215,7 +209,6 @@ fillled
findout
fisrt
fo
folowing
fowarding
frist
fro
@@ -234,6 +227,7 @@ hanling
happend
hasing
hasnt
havin
hda
hostanme
hould
@@ -253,20 +247,14 @@ implmeneted
implmentation
incase
includeing
incosistency
indecates
indien
infor
informations
informaton
infrastrcuture
ingore
inital
initalize
initator
initilization
inspite
instace
instal
instnace
intefaces
@@ -284,12 +272,8 @@ ist
klunky
lable
leve
lief
limite
linke
listner
lokal
lokales
maintainence
maintenace
maintenence
@@ -298,7 +282,6 @@ mambers
manaully
manuel
maxium
mehtod
mergable
mesage
messge
@@ -308,7 +291,6 @@ minumum
mis
modifers
mor
mot
mulitply
multipl
multple
@@ -322,7 +304,7 @@ nin
nodel
nome
noone
nowe
notin
numbe
numer
occured
@@ -390,12 +372,9 @@ remaning
remore
remvoing
renabling
repeatly
reponse
reqest
reqiured
requieres
requried
reserv
reserverd
reseted
@@ -414,14 +393,13 @@ retuned
returing
rever
rocessor
roperty
runing
runnign
sate
scalled
scipt
scirpt
scrip
seconadry
seconday
seesion
sepcified
@@ -434,12 +412,10 @@ settig
sevices
shoul
shoule
sie
signle
simplier
singature
skiping
snaphsot
snpashot
specied
specifed
@@ -450,7 +426,6 @@ standy
statics
stickyness
stil
stip
storeage
strat
streched
@@ -459,7 +434,6 @@ succesfull
successfull
suceessful
suces
sucessfully
suiteable
suppots
suppport
@@ -492,7 +466,6 @@ uncompressible
uneccessarily
unexepected
unexpect
unknow
unkonw
unkown
unneccessary
@@ -500,14 +473,12 @@ unparseable
unrecoginized
unsupport
unxpected
updat
uptodate
usera
usign
usin
utlization
vaidate
valiate
valule
valus
varibles
@@ -516,8 +487,6 @@ verfying
verifing
virutal
visable
wakup
wil
wit
wll
wth
.github/workflows/dependabot.yaml (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "maven" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "daily"
@@ -44,6 +44,6 @@ jobs:
          path: ~/.cache/pre-commit
          key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Run pre-commit
        run: pre-commit run --all-files
        run: pre-commit run --color=always --all-files
      - name: Run manual pre-commit hooks
        run: pre-commit run --all-files --hook-stage manual
        run: pre-commit run --color=always --all-files --hook-stage manual
@@ -25,6 +25,12 @@ repos:
    hooks:
      - id: identity
      - id: check-hooks-apply
  - repo: https://github.com/thlorenz/doctoc.git
    rev: v2.2.0
    hooks:
      - id: doctoc
        name: Add TOC for Markdown files
        files: ^CONTRIBUTING\.md$|^INSTALL\.md$|^README\.md$
  - repo: https://github.com/oxipng/oxipng
    rev: v9.1.5
    hooks:
@@ -41,6 +47,11 @@ repos:
  - repo: https://github.com/Lucas-C/pre-commit-hooks
    rev: v1.5.5
    hooks:
      - id: chmod
        name: set file permissions
        args: ['644']
        files: \.md$
        stages: [manual]
      - id: insert-license
        name: add license for all Markdown files
        files: \.md$
@@ -51,6 +62,44 @@ repos:
          - .github/workflows/license-templates/LICENSE.txt
          - --fuzzy-match-generates-todo
        exclude: ^(CHANGES|ISSUE_TEMPLATE|PULL_REQUEST_TEMPLATE)\.md$|^ui/docs/(full|smoke)-test-plan\.template\.md$
      - id: insert-license
        name: add license for all Shell files
        description: automatically adds a licence header to all Shell files that don't have a license header
        files: \.sh$
        args:
          - --comment-style
          - '|#|'
          - --license-filepath
          - .github/workflows/license-templates/LICENSE.txt
          - --fuzzy-match-generates-todo
      - id: insert-license
        name: add license for all SQL files
        files: \.sql$
        args:
          - --comment-style
          - '|--|'
          - --license-filepath
          - .github/workflows/license-templates/LICENSE.txt
          - --fuzzy-match-generates-todo
      - id: insert-license
        name: add license for all Vue files
        files: \.vue$
        args:
          - --comment-style
          - '|//|'
          - --license-filepath
          - .github/workflows/license-templates/LICENSE.txt
          - --fuzzy-match-generates-todo
      - id: insert-license
        name: add license for all YAML files
        description: automatically adds a licence header to all YAML files that don't have a license header
        files: \.ya?ml$
        args:
          - --comment-style
          - '|#|'
          - --license-filepath
          - .github/workflows/license-templates/LICENSE.txt
          - --fuzzy-match-generates-todo
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v6.0.0
    hooks:
@@ -84,7 +133,7 @@ repos:
          ^systemvm/agent/certs/realhostip\.key$|
          ^test/integration/smoke/test_ssl_offloading\.py$
      - id: end-of-file-fixer
        exclude: \.vhd$
        exclude: \.vhd$|\.svg$
      - id: file-contents-sorter
        args: [--unique]
        files: ^\.github/linters/codespell\.txt$
@@ -92,11 +141,11 @@ repos:
      - id: forbid-submodules
      - id: mixed-line-ending
      - id: trailing-whitespace
        files: \.(bat|cfg|cs|css|gitignore|header|in|install|java|md|properties|py|rb|rc|sh|sql|te|template|txt|ucls|vue|xml|xsl|yaml|yml)$|^cloud-cli/bindir/cloud-tool$|^debian/changelog$
        files: ^(LICENSE|NOTICE)$|\.(bat|cfg|cs|css|gitignore|header|in|install|java|md|properties|py|rb|rc|sh|sql|te|template|txt|ucls|vue|xml|xsl|yaml|yml)$|^cloud-cli/bindir/cloud-tool$|^debian/changelog$
        args: [--markdown-linebreak-ext=md]
        exclude: ^services/console-proxy/rdpconsole/src/test/doc/freerdp-debug-log\.txt$
  - repo: https://github.com/codespell-project/codespell
    rev: v2.2.6
    rev: v2.4.1
    hooks:
      - id: codespell
        name: run codespell
@@ -117,14 +166,6 @@ repos:
        args: [--config=.github/linters/.markdown-lint.yml]
        types: [markdown]
        files: \.(md|mdown|markdown)$
  - repo: https://github.com/Lucas-C/pre-commit-hooks
    rev: v1.5.5
    hooks:
      - id: chmod
        name: set file permissions
        args: ['644']
        files: \.md$
        stages: [manual]
  - repo: https://github.com/adrienverge/yamllint
    rev: v1.37.1
    hooks:
@@ -21,6 +21,24 @@

## Summary

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Summary](#summary)
- [Bug fixes](#bug-fixes)
- [Developing new features](#developing-new-features)
- [PendingReleaseNotes file](#pendingreleasenotes-file)
- [Fork the code](#fork-the-code)
- [Making changes](#making-changes)
- [Rebase `feature_x` to include updates from `upstream/main`](#rebase-feature_x-to-include-updates-from-upstreammain)
- [Make a GitHub Pull Request to contribute your changes](#make-a-github-pull-request-to-contribute-your-changes)
- [Cleaning up after a successful pull request](#cleaning-up-after-a-successful-pull-request)
- [Release Principles](#release-principles)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

## Summary

This document covers how to contribute to the ACS project. ACS uses GitHub PRs to manage code contributions.
These instructions assume you have a GitHub.com account, so if you don't have one you will have to create one. Your proposed code changes will be published to your own fork of the ACS project, and you will submit a Pull Request for your changes to be added.
INSTALL.md (18 changes)

@@ -26,9 +26,21 @@ or the developer [wiki](https://cwiki.apache.org/confluence/display/CLOUDSTACK/H
Apache CloudStack developers use various platforms for development, this guide
was tested against a CentOS 7 x86_64 setup.

* [Setting up development environment](https://cwiki.apache.org/confluence/display/CLOUDSTACK/Setting+up+CloudStack+Development+Environment) for Apache CloudStack.
* [Building](https://cwiki.apache.org/confluence/display/CLOUDSTACK/How+to+build+CloudStack) Apache CloudStack.
* [Appliance based development](https://github.com/rhtyd/monkeybox)
<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Setting up Development Environment](#setting-up-development-environment)
- [Using jenv and/or pyenv for Version Management](#using-jenv-andor-pyenv-for-version-management)
- [Getting the Source Code](#getting-the-source-code)
- [Building](#building)
- [To bring up CloudStack UI](#to-bring-up-cloudstack-ui)
- [Building with non-redistributable plugins](#building-with-non-redistributable-plugins)
- [Packaging and Installation](#packaging-and-installation)
- [Debian/Ubuntu](#debianubuntu)
- [RHEL/CentOS](#rhelcentos)
- [Notes](#notes)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

## Setting up Development Environment
@@ -20,7 +20,7 @@
# pre-commit

We run [pre-commit](https://pre-commit.com/) with
[GitHub Actions](https://github.com/apache/cloudstack/blob/main/.github/workflows/linter.yml) so installation on your
[GitHub Actions](https://github.com/apache/cloudstack/blob/main/.github/workflows/pre-commit.yml) so installation on your
local machine is currently optional.

The `pre-commit` [configuration file](https://github.com/apache/cloudstack/blob/main/.pre-commit-config.yaml)
README.md (18 changes)

@@ -31,6 +31,24 @@

[](https://cloudstack.apache.org/)

<!-- START doctoc generated TOC please keep comment here to allow auto update -->
<!-- DON'T EDIT THIS SECTION, INSTEAD RE-RUN doctoc TO UPDATE -->

- [Who Uses CloudStack?](#who-uses-cloudstack)
- [Demo](#demo)
- [Getting Started](#getting-started)
- [Getting Source Repository](#getting-source-repository)
- [Documentation](#documentation)
- [News and Events](#news-and-events)
- [Getting Involved and Contributing](#getting-involved-and-contributing)
- [Reporting Security Vulnerabilities](#reporting-security-vulnerabilities)
- [License](#license)
- [Notice of Cryptographic Software](#notice-of-cryptographic-software)
- [Star History](#star-history)
- [Contributors](#contributors)

<!-- END doctoc generated TOC please keep comment here to allow auto update -->

Apache CloudStack is open source software designed to deploy and manage large
networks of virtual machines, as a highly available, highly scalable
Infrastructure as a Service (IaaS) cloud computing platform. CloudStack is used
agent/conf/uefi.properties.in (new file, 24 lines)

@@ -0,0 +1,24 @@
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Configuration file for UEFI

guest.nvram.template.legacy=@GUESTNVRAMTEMPLATELEGACY@
guest.loader.legacy=@GUESTLOADERLEGACY@
guest.nvram.template.secure=@GUESTNVRAMTEMPLATESECURE@
guest.loader.secure=@GUESTLOADERSECURE@
guest.nvram.path=@GUESTNVRAMPATH@
@@ -24,7 +24,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloudstack</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
  </parent>
  <dependencies>
    <dependency>
@@ -1322,7 +1322,6 @@ public class Agent implements HandlerFactory, IAgentControl, AgentStatusUpdater
                processResponse((Response)request, task.getLink());
            } else {
                //put the requests from mgt server into another thread pool, as the request may take a longer time to finish. Don't block the NIO main thread pool
                //processRequest(request, task.getLink());
                requestHandler.submit(new AgentRequestHandler(getType(), getLink(), request));
            }
        } catch (final ClassNotFoundException e) {
@@ -1332,13 +1331,14 @@ public class Agent implements HandlerFactory, IAgentControl, AgentStatusUpdater
            }
        } else if (task.getType() == Task.Type.DISCONNECT) {
            try {
                // an issue has been found if reconnect immediately after disconnecting. please refer to https://github.com/apache/cloudstack/issues/8517
                // an issue has been found if reconnect immediately after disconnecting.
                // wait 5 seconds before reconnecting
                logger.debug("Wait for 5 secs before reconnecting, disconnect task - {}", () -> getLinkLog(task.getLink()));
                Thread.sleep(5000);
            } catch (InterruptedException e) {
            }
            shell.setConnectionTransfer(false);
            logger.debug("Executing disconnect task - {}", () -> getLinkLog(task.getLink()));
            logger.debug("Executing disconnect task - {} and reconnecting", () -> getLinkLog(task.getLink()));
            reconnect(task.getLink());
        } else if (task.getType() == Task.Type.OTHER) {
            processOtherTask(task);
@@ -117,7 +117,7 @@ public class AgentProperties{

    /**
     * Local storage path.<br>
     * This property allows multiple values to be entered in a single String. The differente values must be separated by commas.<br>
     * This property allows multiple values to be entered in a single String. The different values must be separated by commas.<br>
     * Data type: String.<br>
     * Default value: <code>/var/lib/libvirt/images/</code>
     */
@@ -134,7 +134,7 @@ public class AgentProperties{

    /**
     * MANDATORY: The UUID for the local storage pool.<br>
     * This property allows multiple values to be entered in a single String. The differente values must be separated by commas.<br>
     * This property allows multiple values to be entered in a single String. The different values must be separated by commas.<br>
     * Data type: String.<br>
     * Default value: <code>null</code>
     */
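Note (not part of the change set above): the javadoc being corrected here describes agent properties whose value is a single comma-separated string. A minimal, hypothetical Java sketch of splitting such a value, with an invented example path list:

import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;

public class CommaSeparatedPropertySketch {
    public static void main(String[] args) {
        // Illustrative value only; real values come from agent.properties (e.g. the local storage path property).
        String raw = "/var/lib/libvirt/images/,/mnt/primary2/";
        List<String> paths = Arrays.stream(raw.split(","))
                .map(String::trim)
                .filter(s -> !s.isEmpty())
                .collect(Collectors.toList());
        System.out.println(paths); // [/var/lib/libvirt/images/, /mnt/primary2/]
    }
}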
@@ -24,7 +24,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloudstack</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
  </parent>
  <dependencies>
    <dependency>
@@ -0,0 +1,182 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.agent.api.to;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class VirtualMachineMetadataTO {
    // VM details
    private final String name;
    private final String internalName;
    private final String displayName;
    private final String instanceUuid;
    private final Integer cpuCores;
    private final Integer memory;
    private final Long created;
    private final Long started;

    // Owner details
    private final String ownerDomainUuid;
    private final String ownerDomainName;
    private final String ownerAccountUuid;
    private final String ownerAccountName;
    private final String ownerProjectUuid;
    private final String ownerProjectName;

    // Host and service offering
    private final String serviceOfferingName;
    private final List<String> serviceOfferingHostTags;

    // zone, pod, and cluster details
    private final String zoneName;
    private final String zoneUuid;
    private final String podName;
    private final String podUuid;
    private final String clusterName;
    private final String clusterUuid;

    // resource tags
    private final Map<String, String> resourceTags;

    public VirtualMachineMetadataTO(
            String name, String internalName, String displayName, String instanceUuid, Integer cpuCores, Integer memory, Long created, Long started,
            String ownerDomainUuid, String ownerDomainName, String ownerAccountUuid, String ownerAccountName, String ownerProjectUuid, String ownerProjectName,
            String serviceOfferingName, List<String> serviceOfferingHostTags,
            String zoneName, String zoneUuid, String podName, String podUuid, String clusterName, String clusterUuid, Map<String, String> resourceTags) {
        /*
         * Something failed in the metadata shall not be a fatal error, the VM can still be started
         * Thus, the unknown fields just get an explicit "unknown" value so it can be fixed in case
         * there are bugs on some execution paths.
         */

        this.name = (name != null) ? name : "unknown";
        this.internalName = (internalName != null) ? internalName : "unknown";
        this.displayName = (displayName != null) ? displayName : "unknown";
        this.instanceUuid = (instanceUuid != null) ? instanceUuid : "unknown";
        this.cpuCores = (cpuCores != null) ? cpuCores : -1;
        this.memory = (memory != null) ? memory : -1;
        this.created = (created != null) ? created : 0;
        this.started = (started != null) ? started : 0;
        this.ownerDomainUuid = (ownerDomainUuid != null) ? ownerDomainUuid : "unknown";
        this.ownerDomainName = (ownerDomainName != null) ? ownerDomainName : "unknown";
        this.ownerAccountUuid = (ownerAccountUuid != null) ? ownerAccountUuid : "unknown";
        this.ownerAccountName = (ownerAccountName != null) ? ownerAccountName : "unknown";
        this.ownerProjectUuid = (ownerProjectUuid != null) ? ownerProjectUuid : "unknown";
        this.ownerProjectName = (ownerProjectName != null) ? ownerProjectName : "unknown";
        this.serviceOfferingName = (serviceOfferingName != null) ? serviceOfferingName : "unknown";
        this.serviceOfferingHostTags = (serviceOfferingHostTags != null) ? serviceOfferingHostTags : new ArrayList<>();
        this.zoneName = (zoneName != null) ? zoneName : "unknown";
        this.zoneUuid = (zoneUuid != null) ? zoneUuid : "unknown";
        this.podName = (podName != null) ? podName : "unknown";
        this.podUuid = (podUuid != null) ? podUuid : "unknown";
        this.clusterName = (clusterName != null) ? clusterName : "unknown";
        this.clusterUuid = (clusterUuid != null) ? clusterUuid : "unknown";

        this.resourceTags = (resourceTags != null) ? resourceTags : new HashMap<>();
    }

    public String getName() {
        return name;
    }

    public String getInternalName() {
        return internalName;
    }

    public String getDisplayName() {
        return displayName;
    }

    public String getInstanceUuid() {
        return instanceUuid;
    }

    public Integer getCpuCores() {
        return cpuCores;
    }

    public Integer getMemory() {
        return memory;
    }

    public Long getCreated() { return created; }

    public Long getStarted() {
        return started;
    }

    public String getOwnerDomainUuid() {
        return ownerDomainUuid;
    }

    public String getOwnerDomainName() {
        return ownerDomainName;
    }

    public String getOwnerAccountUuid() {
        return ownerAccountUuid;
    }

    public String getOwnerAccountName() {
        return ownerAccountName;
    }

    public String getOwnerProjectUuid() {
        return ownerProjectUuid;
    }

    public String getOwnerProjectName() {
        return ownerProjectName;
    }

    public String getserviceOfferingName() {
        return serviceOfferingName;
    }

    public List<String> getserviceOfferingHostTags() {
        return serviceOfferingHostTags;
    }

    public String getZoneName() {
        return zoneName;
    }

    public String getZoneUuid() {
        return zoneUuid;
    }

    public String getPodName() {
        return podName;
    }

    public String getPodUuid() {
        return podUuid;
    }

    public String getClusterName() {
        return clusterName;
    }

    public String getClusterUuid() {
        return clusterUuid;
    }

    public Map<String, String> getResourceTags() { return resourceTags; }
}
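Note (illustrative, not part of the diff): the constructor above intentionally maps null fields to "unknown" or to empty collections so that missing metadata never blocks a VM start. A small hedged usage sketch with invented values, assuming the class above is on the classpath:

import java.util.Arrays;

import com.cloud.agent.api.to.VirtualMachineMetadataTO;

public class VirtualMachineMetadataTOSketch {
    public static void main(String[] args) {
        // Nulls are tolerated by design and replaced with "unknown"/defaults by the constructor.
        VirtualMachineMetadataTO meta = new VirtualMachineMetadataTO(
                "i-2-10-VM", null, "web-01", null, 2, 2048, null, null,
                null, "ROOT", null, "admin", null, null,
                "Medium Instance", Arrays.asList("kvm"),
                "zone1", null, null, null, null, null, null);
        System.out.println(meta.getInternalName());    // "unknown"
        System.out.println(meta.getCpuCores());        // 2
        System.out.println(meta.getResourceTags());    // {} (never null)
    }
}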
@@ -89,6 +89,7 @@ public class VirtualMachineTO {
    private DeployAsIsInfoTO deployAsIsInfo;
    private String metadataManufacturer;
    private String metadataProductName;
    private VirtualMachineMetadataTO metadata;

    public VirtualMachineTO(long id, String instanceName, VirtualMachine.Type type, int cpus, Integer speed, long minRam, long maxRam, BootloaderType bootloader,
            String os, boolean enableHA, boolean limitCpuUse, String vncPassword) {
@@ -494,6 +495,14 @@ public class VirtualMachineTO {
        this.metadataProductName = metadataProductName;
    }

    public VirtualMachineMetadataTO getMetadata() {
        return metadata;
    }

    public void setMetadata(VirtualMachineMetadataTO metadata) {
        this.metadata = metadata;
    }

    @Override
    public String toString() {
        return String.format("VM {id: \"%s\", name: \"%s\", uuid: \"%s\", type: \"%s\"}", id, name, uuid, type);
@@ -36,5 +36,4 @@ public interface HostStats {
    public HostStats getHostStats();

    public double getLoadAverage();
    // public double getXapiMemoryUsageKBs();
}
@@ -78,7 +78,7 @@ public class Networks {
        }
        @Override
        public String getValueFrom(URI uri) {
            return uri.getAuthority();
            return uri == null ? null : uri.getAuthority();
        }
    },
    Vswitch("vs", String.class), LinkLocal(null, null), Vnet("vnet", Long.class), Storage("storage", Integer.class), Lswitch("lswitch", String.class) {
@@ -96,7 +96,7 @@ public class Networks {
         */
        @Override
        public String getValueFrom(URI uri) {
            return uri.getSchemeSpecificPart();
            return uri == null ? null : uri.getSchemeSpecificPart();
        }
    },
    Mido("mido", String.class), Pvlan("pvlan", String.class),
@@ -177,7 +177,7 @@ public class Networks {
     * @return the scheme as BroadcastDomainType
     */
    public static BroadcastDomainType getSchemeValue(URI uri) {
        return toEnumValue(uri.getScheme());
        return toEnumValue(uri == null ? null : uri.getScheme());
    }

    /**
@@ -191,7 +191,7 @@ public class Networks {
        if (com.cloud.dc.Vlan.UNTAGGED.equalsIgnoreCase(str)) {
            return Native;
        }
        return getSchemeValue(new URI(str));
        return getSchemeValue(str == null ? null : new URI(str));
    }

    /**
@@ -220,7 +220,7 @@ public class Networks {
     * @return the host part as String
     */
    public String getValueFrom(URI uri) {
        return uri.getHost();
        return uri == null ? null : uri.getHost();
    }

    /**
@@ -243,7 +243,7 @@ public class Networks {
     * @throws URISyntaxException the string is not even an uri
     */
    public static String getValue(String uriString) throws URISyntaxException {
        return getValue(new URI(uriString));
        return getValue(uriString == null ? null : new URI(uriString));
    }

    /**
@@ -41,4 +41,6 @@ public interface PhysicalNetworkTrafficType extends InternalIdentity, Identity {
    String getHypervNetworkLabel();

    String getOvm3NetworkLabel();

    String getVlan();
}
@@ -108,8 +108,7 @@ public class LbStickinessMethod {
    }

    public void addParam(String name, Boolean required, String description, Boolean isFlag) {
        /* FIXME : UI is breaking if the capability string length is larger , temporarily description is commented out */
        // LbStickinessMethodParam param = new LbStickinessMethodParam(name, required, description);
        /* is this still a valid comment: FIXME : UI is breaking if the capability string length is larger , temporarily description is commented out */
        LbStickinessMethodParam param = new LbStickinessMethodParam(name, required, " ", isFlag);
        _paramList.add(param);
        return;
@@ -133,7 +132,6 @@ public class LbStickinessMethod {

    public void setDescription(String description) {
        /* FIXME : UI is breaking if the capability string length is larger , temporarily description is commented out */
        //this.description = description;
        this._description = " ";
    }
}
@@ -128,7 +128,7 @@ public class Storage {
    public static enum TemplateType {
        ROUTING, // Router template
        SYSTEM, /* routing, system vm template */
        BUILTIN, /* buildin template */
        BUILTIN, /* builtin template */
        PERHOST, /* every host has this template, don't need to install it in secondary storage */
        USER, /* User supplied template/iso */
        VNF, /* VNFs (virtual network functions) template */
@@ -150,7 +150,7 @@ public class UpdateCfgCmd extends BaseCmd {
            ConfigurationResponse response = _responseGenerator.createConfigurationResponse(cfg);
            response.setResponseName(getCommandName());
            response = setResponseScopes(response);
            response = setResponseValue(response, cfg);
            setResponseValue(response, cfg);
            this.setResponseObject(response);
        } else {
            throw new ServerApiException(ApiErrorCode.INTERNAL_ERROR, "Failed to update config");
@@ -161,15 +161,13 @@ public class UpdateCfgCmd extends BaseCmd {
     * Sets the configuration value in the response. If the configuration is in the `Hidden` or `Secure` categories, the value is encrypted before being set in the response.
     * @param response to be set with the configuration `cfg` value
     * @param cfg to be used in setting the response value
     * @return the response with the configuration's value
     */
    public ConfigurationResponse setResponseValue(ConfigurationResponse response, Configuration cfg) {
    public void setResponseValue(ConfigurationResponse response, Configuration cfg) {
        String value = cfg.getValue();
        if (cfg.isEncrypted()) {
            response.setValue(DBEncryptionUtil.encrypt(getValue()));
        } else {
            response.setValue(getValue());
            value = DBEncryptionUtil.encrypt(value);
        }
        return response;
        response.setValue(value);
    }

    /**
@@ -1,4 +1,4 @@
// Licensedname = "listIsoPermissions", to the Apache Software Foundation (ASF) under one
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
@@ -153,6 +153,8 @@ public class UpdateStoragePoolCmd extends BaseCmd {
        if (ObjectUtils.anyNotNull(name, capacityIops, capacityBytes, url, isTagARule, tags) ||
                MapUtils.isNotEmpty(details)) {
            result = _storageService.updateStoragePool(this);
        } else {
            result = _storageService.getStoragePool(getId());
        }

        if (enabled != null) {
@@ -1,4 +1,4 @@
// Licensedname = "listTemplatePermissions", to the Apache Software Foundation (ASF) under one
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
@@ -26,14 +26,13 @@ import org.apache.cloudstack.api.BaseListCmd;
import org.apache.cloudstack.api.Parameter;
import org.apache.cloudstack.api.response.ListResponse;
import org.apache.cloudstack.api.response.PhysicalNetworkResponse;
import org.apache.cloudstack.api.response.ProviderResponse;
import org.apache.cloudstack.api.response.TrafficTypeResponse;

import com.cloud.network.PhysicalNetworkTrafficType;
import com.cloud.user.Account;
import com.cloud.utils.Pair;

@APICommand(name = "listTrafficTypes", description = "Lists traffic types of a given physical network.", responseObject = ProviderResponse.class, since = "3.0.0",
@APICommand(name = "listTrafficTypes", description = "Lists traffic types of a given physical network.", responseObject = TrafficTypeResponse.class, since = "3.0.0",
        requestHasSensitiveInfo = false, responseHasSensitiveInfo = false)
public class ListTrafficTypesCmd extends BaseListCmd {
@@ -53,7 +53,7 @@ public class ListPublicIpAddressesCmd extends BaseListRetrieveOnlyResourceCountC
    @Parameter(name = ApiConstants.ALLOCATED_ONLY, type = CommandType.BOOLEAN, description = "limits search results to allocated public IP addresses")
    private Boolean allocatedOnly;

    @Parameter(name = ApiConstants.STATE, type = CommandType.STRING, description = "lists all public IP addresses by state")
    @Parameter(name = ApiConstants.STATE, type = CommandType.STRING, description = "lists all public IP addresses by state. A comma-separated list of states can be passed")
    private String state;

    @Parameter(name = ApiConstants.FOR_VIRTUAL_NETWORK, type = CommandType.BOOLEAN, description = "the virtual network for the IP address")
@@ -1,4 +1,4 @@
// Licensedname = "listIsoPermissions", to the Apache Software Foundation (ASF) under one
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
@@ -63,7 +63,7 @@ public class UpdateLBHealthCheckPolicyCmd extends BaseAsyncCustomIdCmd{

    @Override
    public String getEventDescription() {
        return "Update load balancer health check policy ID= " + id;
        return "Update load balancer health check policy ID = " + id;
    }

    @Override
@@ -62,7 +62,7 @@ public class UpdateLBStickinessPolicyCmd extends BaseAsyncCustomIdCmd{

    @Override
    public String getEventDescription() {
        return "Update load balancer stickiness policy ID= " + id;
        return "Update load balancer stickiness policy ID = " + id;
    }

    @Override
@@ -1,4 +1,4 @@
// Licensedname = "listTemplatePermissions", to the Apache Software Foundation (ASF) under one
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
@@ -66,7 +66,7 @@ public class UpdateVpnConnectionCmd extends BaseAsyncCustomIdCmd {

    @Override
    public String getEventDescription() {
        return "Updating site-to-site VPN connection id= " + id;
        return "Updating site-to-site VPN connection ID = " + id;
    }

    @Override
@@ -63,7 +63,7 @@ public class UpdateVpnGatewayCmd extends BaseAsyncCustomIdCmd {

    @Override
    public String getEventDescription() {
        return "Update site-to-site VPN gateway id= " + id;
        return "Update site-to-site VPN gateway ID = " + id;
    }

    @Override
@@ -27,8 +27,6 @@ import org.apache.cloudstack.api.EntityReference;
import org.apache.cloudstack.network.tls.SslCert;
import com.cloud.serializer.Param;

//import org.apache.cloudstack.api.EntityReference;

@EntityReference(value = SslCert.class)
public class SslCertResponse extends BaseResponse {
@@ -56,6 +56,14 @@ public class TrafficTypeResponse extends BaseResponse {
    @Param(description = "The network name label of the physical device dedicated to this traffic on a HyperV host")
    private String hypervNetworkLabel;

    @SerializedName(ApiConstants.VLAN)
    @Param(description = "The VLAN id to be used for Management traffic by VMware host")
    private String vlan;

    @SerializedName(ApiConstants.ISOLATION_METHODS)
    @Param(description = "isolation methods for the physical network traffic")
    private String isolationMethods;

    @SerializedName(ApiConstants.OVM3_NETWORK_LABEL)
    @Param(description = "The network name of the physical device dedicated to this traffic on an OVM3 host")
    private String ovm3NetworkLabel;
@@ -128,4 +136,20 @@ public class TrafficTypeResponse extends BaseResponse {
    public void setOvm3Label(String ovm3Label) {
        this.ovm3NetworkLabel = ovm3Label;
    }

    public String getIsolationMethods() {
        return isolationMethods;
    }

    public void setIsolationMethods(String isolationMethods) {
        this.isolationMethods = isolationMethods;
    }

    public String getVlan() {
        return vlan;
    }

    public void setVlan(String vlan) {
        this.vlan = vlan;
    }
}
@@ -124,6 +124,10 @@ public interface BackupProvider {
     */
    boolean supportsInstanceFromBackup();

    default boolean supportsMemoryVmSnapshot() {
        return true;
    }

    /**
     * Returns the backup storage usage (Used, Total) for a backup provider
     * @param zoneId the zone for which to return metrics
@@ -34,4 +34,11 @@ public interface BackupService {
     * @return backup provider
     */
    BackupProvider getBackupProvider(final Long zoneId);

    /**
     * Find backup provider by name
     * @param name backup provider name
     * @return backup provider
     */
    BackupProvider getBackupProvider(final String name);
}
@@ -37,6 +37,24 @@ public class NetworksTest {
    public void setUp() {
    }

    @Test
    public void nullBroadcastDomainTypeTest() throws URISyntaxException {
        BroadcastDomainType type = BroadcastDomainType.getTypeOf(null);
        Assert.assertEquals("a null uri should mean a broadcasttype of undecided", BroadcastDomainType.UnDecided, type);
    }

    @Test
    public void nullBroadcastDomainTypeValueTest() {
        URI uri = null;
        Assert.assertNull(BroadcastDomainType.getValue(uri));
    }

    @Test
    public void nullBroadcastDomainTypeStringValueTest() throws URISyntaxException {
        String uriString = null;
        Assert.assertNull(BroadcastDomainType.getValue(uriString));
    }

    @Test
    public void emptyBroadcastDomainTypeTest() throws URISyntaxException {
        BroadcastDomainType type = BroadcastDomainType.getTypeOf("");
@@ -0,0 +1,81 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.cloudstack.api.command.admin.config;

import org.apache.cloudstack.api.response.ConfigurationResponse;
import org.apache.cloudstack.config.Configuration;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.MockedStatic;
import org.mockito.Mockito;
import org.mockito.junit.MockitoJUnitRunner;

import com.cloud.utils.crypt.DBEncryptionUtil;

@RunWith(MockitoJUnitRunner.class)
public class UpdateCfgCmdTest {

    private UpdateCfgCmd updateCfgCmd;

    private MockedStatic<DBEncryptionUtil> mockedStatic;

    @Before
    public void setUp() {
        updateCfgCmd = new UpdateCfgCmd();
        mockedStatic = Mockito.mockStatic(DBEncryptionUtil.class);
    }

    @After
    public void tearDown() {
        mockedStatic.close();
    }

    @Test
    public void setResponseValueSetsEncryptedValueWhenConfigurationIsEncrypted() {
        ConfigurationResponse response = new ConfigurationResponse();
        Configuration cfg = Mockito.mock(Configuration.class);
        Mockito.when(cfg.isEncrypted()).thenReturn(true);
        Mockito.when(cfg.getValue()).thenReturn("testValue");
        Mockito.when(DBEncryptionUtil.encrypt("testValue")).thenReturn("encryptedValue");
        updateCfgCmd.setResponseValue(response, cfg);
        Assert.assertEquals("encryptedValue", response.getValue());
    }

    @Test
    public void setResponseValueSetsPlainValueWhenConfigurationIsNotEncrypted() {
        ConfigurationResponse response = new ConfigurationResponse();
        Configuration cfg = Mockito.mock(Configuration.class);
        Mockito.when(cfg.isEncrypted()).thenReturn(false);
        Mockito.when(cfg.getValue()).thenReturn("testValue");
        updateCfgCmd.setResponseValue(response, cfg);
        Assert.assertEquals("testValue", response.getValue());
    }

    @Test
    public void setResponseValueHandlesNullConfigurationValueGracefully() {
        ConfigurationResponse response = new ConfigurationResponse();
        Configuration cfg = Mockito.mock(Configuration.class);
        Mockito.when(cfg.isEncrypted()).thenReturn(false);
        Mockito.when(cfg.getValue()).thenReturn(null);
        updateCfgCmd.setResponseValue(response, cfg);
        Assert.assertNull(response.getValue());
    }

}
@@ -78,10 +78,6 @@ public class ScaleVMCmdTest extends TestCase {
        scaleVMCmd._responseGenerator = responseGenerator;

        UserVmResponse userVmResponse = Mockito.mock(UserVmResponse.class);
        //List<UserVmResponse> list = Mockito.mock(UserVmResponse.class);
        //list.add(userVmResponse);
        //LinkedList<UserVmResponse> mockedList = Mockito.mock(LinkedList.class);
        //Mockito.when(mockedList.get(0)).thenReturn(userVmResponse);

        List<UserVmResponse> list = new LinkedList<UserVmResponse>();
        list.add(userVmResponse);
@@ -25,7 +25,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloudstack</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
  </parent>
  <dependencies>
    <dependency>
@@ -24,7 +24,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloudstack</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
  </parent>
  <dependencies>
    <dependency>
@@ -629,9 +629,6 @@ public class HAProxyConfigurator implements LoadBalancerConfigurator {
            }
        }
        result.addAll(gSection);
        // TODO decide under what circumstances these options are needed
        // result.add("\tnokqueue");
        // result.add("\tnopoll");

        result.add(blankLine);
        final List<String> dSection = Arrays.asList(defaultsSection);
@@ -417,8 +417,6 @@ public class VirtualRoutingResourceTest implements VirtualRouterDeployer {
            // FIXME Check the json content
            assertEquals(VRScripts.UPDATE_CONFIG, script);
            assertEquals(VRScripts.NETWORK_ACL_CONFIG, args);
            // assertEquals(args, " -d eth3 -M 01:23:45:67:89:AB -i 192.168.1.1 -m 24 -a Egress:ALL:0:0:192.168.0.1/24-192.168.0.2/24:ACCEPT:," +
            // "Ingress:ICMP:0:0:192.168.0.1/24-192.168.0.2/24:DROP:,Ingress:TCP:20:80:192.168.0.1/24-192.168.0.2/24:ACCEPT:,");
            break;
        case 2:
            assertEquals(VRScripts.UPDATE_CONFIG, script);
@@ -464,8 +462,6 @@ public class VirtualRoutingResourceTest implements VirtualRouterDeployer {

    private void verifyArgs(final SetupGuestNetworkCommand cmd, final String script, final String args) {
        // TODO Check the contents of the json file
        //assertEquals(script, VRScripts.VPC_GUEST_NETWORK);
        //assertEquals(args, " -C -M 01:23:45:67:89:AB -d eth4 -i 10.1.1.2 -g 10.1.1.1 -m 24 -n 10.1.1.0 -s 8.8.8.8,8.8.4.4 -e cloud.test");
    }

    @Test
debian/changelog (vendored, 8 changes)

@@ -1,12 +1,12 @@
cloudstack (4.22.0.0) unstable; urgency=low
cloudstack (4.23.0.0-SNAPSHOT) unstable; urgency=low

  * Update the version to 4.22.0.0
  * Update the version to 4.23.0.0-SNAPSHOT

 -- the Apache CloudStack project <dev@cloudstack.apache.org>  Thu, 30 Oct 2025 19:23:55 +0530

cloudstack (4.22.0.0-SNAPSHOT) unstable; urgency=low
cloudstack (4.23.0.0-SNAPSHOT-SNAPSHOT) unstable; urgency=low

  * Update the version to 4.22.0.0-SNAPSHOT
  * Update the version to 4.23.0.0-SNAPSHOT-SNAPSHOT

 -- the Apache CloudStack project <dev@cloudstack.apache.org>  Thu, Aug 28 11:58:36 2025 +0530
debian/cloudstack-agent.install (vendored, 1 change)

@@ -16,6 +16,7 @@
# under the License.

/etc/cloudstack/agent/agent.properties
/etc/cloudstack/agent/uefi.properties
/etc/cloudstack/agent/environment.properties
/etc/cloudstack/agent/log4j-cloud.xml
/etc/default/cloudstack-agent
debian/cloudstack-agent.postinst (vendored, 2 changes)

@@ -23,7 +23,7 @@ case "$1" in
    configure)
        OLDCONFDIR="/etc/cloud/agent"
        NEWCONFDIR="/etc/cloudstack/agent"
        CONFFILES="agent.properties log4j.xml log4j-cloud.xml"
        CONFFILES="agent.properties uefi.properties log4j.xml log4j-cloud.xml"

        mkdir -m 0755 -p /usr/share/cloudstack-agent/tmp
debian/control (vendored, 2 changes)

@@ -24,7 +24,7 @@ Description: CloudStack server library

Package: cloudstack-agent
Architecture: all
Depends: ${python:Depends}, ${python3:Depends}, openjdk-17-jre-headless | java17-runtime-headless | java17-runtime | zulu-17, cloudstack-common (= ${source:Version}), lsb-base (>= 9), openssh-client, qemu-kvm (>= 2.5) | qemu-system-x86 (>= 5.2), libvirt-bin (>= 1.3) | libvirt-daemon-system (>= 3.0), iproute2, ebtables, vlan, ipset, python3-libvirt, ethtool, iptables, cryptsetup, rng-tools, rsync, lsb-release, ufw, apparmor, cpu-checker, libvirt-daemon-driver-storage-rbd, sysstat
Depends: ${python:Depends}, ${python3:Depends}, openjdk-17-jre-headless | java17-runtime-headless | java17-runtime | zulu-17, cloudstack-common (= ${source:Version}), lsb-base (>= 9), openssh-client, qemu-kvm (>= 2.5) | qemu-system-x86 (>= 5.2), libvirt-bin (>= 1.3) | libvirt-daemon-system (>= 3.0), iproute2, ebtables, vlan, ipset, python3-libvirt, ethtool, iptables, cryptsetup, rng-tools, rsync, ovmf, swtpm, lsb-release, ufw, apparmor, cpu-checker, libvirt-daemon-driver-storage-rbd, sysstat
Recommends: init-system-helpers
Conflicts: cloud-agent, cloud-agent-libs, cloud-agent-deps, cloud-agent-scripts
Description: CloudStack agent
@@ -25,7 +25,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloudstack</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
  </parent>
  <dependencies>
    <dependency>
@@ -24,7 +24,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloud-engine</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
  <dependencies>
@@ -24,7 +24,7 @@
  <parent>
    <groupId>org.apache.cloudstack</groupId>
    <artifactId>cloud-engine</artifactId>
    <version>4.22.0.0</version>
    <version>4.23.0.0-SNAPSHOT</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
  <dependencies>
@@ -94,6 +94,14 @@ public class UsageEventUtils {

    }

    public static void publishUsageEvent(String usageType, long accountId, long zoneId, long resourceId, String resourceName, Long offeringId, Long templateId,
            Long size, String entityType, String entityUUID, Long vmId, boolean displayResource) {
        if (displayResource) {
            saveUsageEvent(usageType, accountId, zoneId, resourceId, offeringId, templateId, size, vmId, resourceName);
        }
        publishUsageEvent(usageType, accountId, zoneId, entityType, entityUUID);
    }

    public static void publishUsageEvent(String usageType, long accountId, long zoneId, long resourceId, String resourceName, Long offeringId, Long templateId,
            Long size, Long virtualSize, String entityType, String entityUUID, Map<String, String> details) {
        saveUsageEvent(usageType, accountId, zoneId, resourceId, resourceName, offeringId, templateId, size, virtualSize, details);
@@ -202,6 +210,10 @@ public class UsageEventUtils {
        s_usageEventDao.persist(new UsageEventVO(usageType, accountId, zoneId, vmId, securityGroupId));
    }

    public static void saveUsageEvent(String usageType, long accountId, long zoneId, long resourceId, Long offeringId, Long templateId, Long size, Long vmId, String resourceName) {
        s_usageEventDao.persist(new UsageEventVO(usageType, accountId, zoneId, resourceId, offeringId, templateId, size, vmId, resourceName));
    }

    private static void publishUsageEvent(String usageEventType, Long accountId, Long zoneId, String resourceType, String resourceUUID) {
        String configKey = "publish.usage.events";
        String value = s_configDao.getValue(configKey);
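Note (hypothetical call sketch, not part of the diff): the new publishUsageEvent overload above persists the event only when displayResource is true and then publishes it on the event bus. All values below are invented, and the call assumes an initialized management-server context:

// Illustrative only: publish a usage event for a displayable 10 GiB resource.
UsageEventUtils.publishUsageEvent(
        "VOLUME.CREATE",            // usageType (example event type string)
        2L,                         // accountId
        1L,                         // zoneId
        42L,                        // resourceId
        "DATA-42",                  // resourceName
        7L,                         // offeringId
        (Long) null,                // templateId
        10L * 1024 * 1024 * 1024,   // size in bytes
        "Volume",                   // entityType
        "0f1d2c3b-0000-0000-0000-000000000000", // entityUUID (made up)
        101L,                       // vmId
        true);                      // displayResource: the event is saved only when true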
@@ -230,7 +230,7 @@ public interface StorageManager extends StorageService {

    /**
     * should we execute in sequence not involving any storages?
     * @return tru if commands should execute in sequence
     * @return true if commands should execute in sequence
     */
    static boolean shouldExecuteInSequenceOnVmware() {
        return shouldExecuteInSequenceOnVmware(null, null);
@ -61,7 +61,6 @@ public class VmWorkSerializer {
|
||||
// use java binary serialization instead
|
||||
//
|
||||
return JobSerializerHelper.toObjectSerializedString(work);
|
||||
// return s_gson.toJson(work);
|
||||
}
|
||||
|
||||
public static <T extends VmWork> T deserialize(Class<?> clazz, String workInJsonText) {
|
||||
@ -69,6 +68,5 @@ public class VmWorkSerializer {
|
||||
// use java binary serialization instead
|
||||
//
|
||||
return (T)JobSerializerHelper.fromObjectSerializedString(workInJsonText);
|
||||
// return (T)s_gson.fromJson(workInJsonText, clazz);
|
||||
}
|
||||
}
|
||||
|
||||
@ -42,7 +42,7 @@ public interface VMSnapshotManager extends VMSnapshotService, Manager {
|
||||
boolean deleteAllVMSnapshots(long id, VMSnapshot.Type type);
|
||||
|
||||
/**
|
||||
* Sync VM snapshot state when VM snapshot in reverting or snapshoting or expunging state
|
||||
* Sync VM snapshot state when VM snapshot in reverting or snapshotting or expunging state
|
||||
* Used for fullsync after agent connects
|
||||
*
|
||||
* @param vm, the VM in question
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -1652,7 +1652,6 @@ public class AgentManagerImpl extends ManagerBase implements AgentManager, Handl
|
||||
final String reason = shutdown.getReason();
|
||||
logger.info("Host {} has informed us that it is shutting down with reason {} and detail {}", attache, reason, shutdown.getDetail());
|
||||
if (reason.equals(ShutdownCommand.Update)) {
|
||||
// disconnectWithoutInvestigation(attache, Event.UpdateNeeded);
|
||||
throw new CloudRuntimeException("Agent update not implemented");
|
||||
} else if (reason.equals(ShutdownCommand.Requested)) {
|
||||
disconnectWithoutInvestigation(attache, Event.ShutdownRequested);
|
||||
@ -1753,7 +1752,6 @@ public class AgentManagerImpl extends ManagerBase implements AgentManager, Handl
|
||||
}
|
||||
} catch (final UnsupportedVersionException e) {
|
||||
logger.warn(e.getMessage());
|
||||
// upgradeAgent(task.getLink(), data, e.getReason());
|
||||
} catch (final ClassNotFoundException e) {
|
||||
final String message = String.format("Exception occurred when executing tasks! Error '%s'", e.getMessage());
|
||||
logger.error(message);
|
||||
|
||||
@ -965,7 +965,6 @@ public class ClusteredAgentManagerImpl extends AgentManagerImpl implements Clust
|
||||
synchronized (_agentToTransferIds) {
|
||||
if (!_agentToTransferIds.isEmpty()) {
|
||||
logger.debug("Found {} agents to transfer", _agentToTransferIds.size());
|
||||
// for (Long hostId : _agentToTransferIds) {
|
||||
for (final Iterator<Long> iterator = _agentToTransferIds.iterator(); iterator.hasNext(); ) {
|
||||
final Long hostId = iterator.next();
|
||||
final AgentAttache attache = findAttache(hostId);
|
||||
|
||||
@ -213,7 +213,6 @@ public class EngineHostDaoImpl extends GenericDaoBase<EngineHostVO, Long> implem
|
||||
|
||||
SequenceSearch = createSearchBuilder();
|
||||
SequenceSearch.and("id", SequenceSearch.entity().getId(), SearchCriteria.Op.EQ);
|
||||
// SequenceSearch.addRetrieve("sequence", SequenceSearch.entity().getSequence());
|
||||
SequenceSearch.done();
|
||||
|
||||
DirectlyConnectedSearch = createSearchBuilder();
|
||||
|
||||
@ -903,7 +903,7 @@ public class VolumeOrchestrator extends ManagerBase implements VolumeOrchestrati
|
||||
// Save usage event and update resource count for user vm volumes
|
||||
if (vm.getType() == VirtualMachine.Type.User) {
|
||||
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offering.getId(), null, size,
|
||||
Volume.class.getName(), vol.getUuid(), vol.isDisplayVolume());
|
||||
Volume.class.getName(), vol.getUuid(), vol.getInstanceId(), vol.isDisplayVolume());
|
||||
_resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering);
|
||||
}
|
||||
DiskProfile diskProfile = toDiskProfile(vol, offering);
|
||||
@ -981,7 +981,7 @@ public class VolumeOrchestrator extends ManagerBase implements VolumeOrchestrati
|
||||
}
|
||||
|
||||
UsageEventUtils.publishUsageEvent(EventTypes.EVENT_VOLUME_CREATE, vol.getAccountId(), vol.getDataCenterId(), vol.getId(), vol.getName(), offeringId, vol.getTemplateId(), size,
|
||||
Volume.class.getName(), vol.getUuid(), vol.isDisplayVolume());
|
||||
Volume.class.getName(), vol.getUuid(), vol.getInstanceId(), vol.isDisplayVolume());
|
||||
|
||||
_resourceLimitMgr.incrementVolumeResourceCount(vm.getAccountId(), vol.isDisplayVolume(), vol.getSize(), offering);
|
||||
}
|
||||
@ -1583,12 +1583,8 @@ public class VolumeOrchestrator extends ManagerBase implements VolumeOrchestrati
|
||||
vm.addDisk(disk);
|
||||
}
|
||||
|
||||
//if (vm.getType() == VirtualMachine.Type.User && vm.getTemplate().getFormat() == ImageFormat.ISO) {
|
||||
if (vm.getType() == VirtualMachine.Type.User) {
|
||||
_tmpltMgr.prepareIsoForVmProfile(vm, dest);
|
||||
//DataTO dataTO = tmplFactory.getTemplate(vm.getTemplate().getId(), DataStoreRole.Image, vm.getVirtualMachine().getDataCenterId()).getTO();
|
||||
//DiskTO iso = new DiskTO(dataTO, 3L, null, Volume.Type.ISO);
|
||||
//vm.addDisk(iso);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -140,20 +140,12 @@ public class ProvisioningServiceImpl implements ProvisioningService {
|
||||
|
||||
@Override
|
||||
public List<PodEntity> listPods() {
|
||||
/*
|
||||
* Not in use now, just commented out.
|
||||
*/
|
||||
//List<PodEntity> pods = new ArrayList<PodEntity>();
|
||||
//pods.add(new PodEntityImpl("pod-uuid-1", "pod1"));
|
||||
//pods.add(new PodEntityImpl("pod-uuid-2", "pod2"));
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<ZoneEntity> listZones() {
|
||||
List<ZoneEntity> zones = new ArrayList<ZoneEntity>();
|
||||
//zones.add(new ZoneEntityImpl("zone-uuid-1"));
|
||||
//zones.add(new ZoneEntityImpl("zone-uuid-2"));
|
||||
return zones;
|
||||
}
|
||||
|
||||
|
||||
@ -25,7 +25,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloudstack</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
<build>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -36,7 +36,6 @@ public class ClusterVSMMapDaoImpl extends GenericDaoBase<ClusterVSMMapVO, Long>
|
||||
final SearchBuilder<ClusterVSMMapVO> VsmSearch;
|
||||
|
||||
public ClusterVSMMapDaoImpl() {
|
||||
//super();
|
||||
|
||||
ClusterSearch = createSearchBuilder();
|
||||
ClusterSearch.and("clusterId", ClusterSearch.entity().getClusterId(), SearchCriteria.Op.EQ);
|
||||
@ -82,8 +81,6 @@ public class ClusterVSMMapDaoImpl extends GenericDaoBase<ClusterVSMMapVO, Long>
|
||||
TransactionLegacy txn = TransactionLegacy.currentTxn();
|
||||
txn.start();
|
||||
ClusterVSMMapVO cluster = createForUpdate();
|
||||
//cluster.setClusterId(null);
|
||||
//cluster.setVsmId(null);
|
||||
|
||||
update(id, cluster);
|
||||
|
||||
|
||||
@ -75,6 +75,9 @@ public class UsageEventVO implements UsageEvent {
    @Column(name = "virtual_size")
    private Long virtualSize;

    @Column(name = "vm_id")
    private Long vmId;

    public UsageEventVO() {
    }

@ -143,6 +146,18 @@ public class UsageEventVO implements UsageEvent {
        this.offeringId = securityGroupId;
    }

    public UsageEventVO(String usageType, long accountId, long zoneId, long resourceId, Long offeringId, Long templateId, Long size, Long vmId, String resourceName) {
        this.type = usageType;
        this.accountId = accountId;
        this.zoneId = zoneId;
        this.resourceId = resourceId;
        this.offeringId = offeringId;
        this.templateId = templateId;
        this.size = size;
        this.vmId = vmId;
        this.resourceName = resourceName;
    }

    @Override
    public long getId() {
        return id;
@ -248,4 +263,11 @@ public class UsageEventVO implements UsageEvent {
        this.virtualSize = virtualSize;
    }

    public Long getVmId() {
        return vmId;
    }

    public void setVmId(Long vmId) {
        this.vmId = vmId;
    }
}

@ -45,11 +45,11 @@ public class UsageEventDaoImpl extends GenericDaoBase<UsageEventVO, Long> implem
|
||||
private final SearchBuilder<UsageEventVO> latestEventsSearch;
|
||||
private final SearchBuilder<UsageEventVO> IpeventsSearch;
|
||||
private static final String COPY_EVENTS =
|
||||
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size) "
|
||||
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size FROM cloud.usage_event vmevt WHERE vmevt.id > ? and vmevt.id <= ? ";
|
||||
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id) "
|
||||
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id FROM cloud.usage_event vmevt WHERE vmevt.id > ? and vmevt.id <= ? ";
|
||||
private static final String COPY_ALL_EVENTS =
|
||||
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size) "
|
||||
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size FROM cloud.usage_event vmevt WHERE vmevt.id <= ?";
|
||||
"INSERT INTO cloud_usage.usage_event (id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id) "
|
||||
+ "SELECT id, type, account_id, created, zone_id, resource_id, resource_name, offering_id, template_id, size, resource_type, virtual_size, vm_id FROM cloud.usage_event vmevt WHERE vmevt.id <= ?";
|
||||
private static final String COPY_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) "
|
||||
+ "SELECT id, usage_event_id, name, value FROM cloud.usage_event_details vmevtDetails WHERE vmevtDetails.usage_event_id > ? and vmevtDetails.usage_event_id <= ? ";
|
||||
private static final String COPY_ALL_EVENT_DETAILS = "INSERT INTO cloud_usage.usage_event_details (id, usage_event_id, name, value) "
|
||||
|
||||
@ -76,7 +76,6 @@ public class VmRulesetLogDaoImpl extends GenericDaoBase<VmRulesetLogVO, Long> im
|
||||
|
||||
@Override
|
||||
public int createOrUpdate(Set<Long> workItems) {
|
||||
//return createOrUpdateUsingBatch(workItems);
|
||||
return createOrUpdateUsingMultiInsert(workItems);
|
||||
}
|
||||
|
||||
|
||||
@ -100,7 +100,6 @@ public class VMTemplateDaoImpl extends GenericDaoBase<VMTemplateVO, Long> implem
|
||||
private SearchBuilder<VMTemplateVO> PublicIsoSearch;
|
||||
private SearchBuilder<VMTemplateVO> UserIsoSearch;
|
||||
private GenericSearchBuilder<VMTemplateVO, Long> CountTemplatesByAccount;
|
||||
// private SearchBuilder<VMTemplateVO> updateStateSearch;
|
||||
private SearchBuilder<VMTemplateVO> AllFieldsSearch;
|
||||
protected SearchBuilder<VMTemplateVO> ParentTemplateIdSearch;
|
||||
private SearchBuilder<VMTemplateVO> InactiveUnremovedTmpltSearch;
|
||||
@ -404,12 +403,6 @@ public class VMTemplateDaoImpl extends GenericDaoBase<VMTemplateVO, Long> implem
|
||||
CountTemplatesByAccount.and("state", CountTemplatesByAccount.entity().getState(), SearchCriteria.Op.EQ);
|
||||
CountTemplatesByAccount.done();
|
||||
|
||||
// updateStateSearch = this.createSearchBuilder();
|
||||
// updateStateSearch.and("id", updateStateSearch.entity().getId(), Op.EQ);
|
||||
// updateStateSearch.and("state", updateStateSearch.entity().getState(), Op.EQ);
|
||||
// updateStateSearch.and("updatedCount", updateStateSearch.entity().getUpdatedCount(), Op.EQ);
|
||||
// updateStateSearch.done();
|
||||
|
||||
AllFieldsSearch = createSearchBuilder();
|
||||
AllFieldsSearch.and("state", AllFieldsSearch.entity().getState(), SearchCriteria.Op.EQ);
|
||||
AllFieldsSearch.and("accountId", AllFieldsSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
|
||||
|
||||
@ -33,11 +33,10 @@ import java.util.List;
|
||||
|
||||
import javax.inject.Inject;
|
||||
|
||||
import com.cloud.utils.FileUtil;
|
||||
import org.apache.cloudstack.utils.CloudStackVersion;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.apache.logging.log4j.Logger;
|
||||
|
||||
import com.cloud.upgrade.dao.DbUpgrade;
|
||||
import com.cloud.upgrade.dao.DbUpgradeSystemVmTemplate;
|
||||
@ -91,8 +90,10 @@ import com.cloud.upgrade.dao.Upgrade41910to42000;
|
||||
import com.cloud.upgrade.dao.Upgrade42000to42010;
|
||||
import com.cloud.upgrade.dao.Upgrade42010to42100;
|
||||
import com.cloud.upgrade.dao.Upgrade42100to42200;
|
||||
import com.cloud.upgrade.dao.Upgrade42200to42210;
|
||||
import com.cloud.upgrade.dao.Upgrade420to421;
|
||||
import com.cloud.upgrade.dao.Upgrade421to430;
|
||||
import com.cloud.upgrade.dao.Upgrade42210to42300;
|
||||
import com.cloud.upgrade.dao.Upgrade430to440;
|
||||
import com.cloud.upgrade.dao.Upgrade431to440;
|
||||
import com.cloud.upgrade.dao.Upgrade432to440;
|
||||
@ -121,6 +122,7 @@ import com.cloud.upgrade.dao.VersionDao;
|
||||
import com.cloud.upgrade.dao.VersionDaoImpl;
|
||||
import com.cloud.upgrade.dao.VersionVO;
|
||||
import com.cloud.upgrade.dao.VersionVO.Step;
|
||||
import com.cloud.utils.FileUtil;
|
||||
import com.cloud.utils.component.SystemIntegrityChecker;
|
||||
import com.cloud.utils.crypt.DBEncryptionUtil;
|
||||
import com.cloud.utils.db.GlobalLock;
|
||||
@ -236,6 +238,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
                .next("4.20.0.0", new Upgrade42000to42010())
                .next("4.20.1.0", new Upgrade42010to42100())
                .next("4.21.0.0", new Upgrade42100to42200())
                .next("4.22.0.0", new Upgrade42200to42210())
                .next("4.22.1.0", new Upgrade42210to42300())
                .build();
    }
|
||||
|
||||
@ -313,20 +317,20 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
}
|
||||
|
||||
protected void executeProcedureScripts() {
|
||||
LOGGER.info(String.format("Executing Stored Procedure scripts that are under resource directory [%s].", PROCEDURES_DIRECTORY));
|
||||
LOGGER.info("Executing Stored Procedure scripts that are under resource directory [{}].", PROCEDURES_DIRECTORY);
|
||||
List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(PROCEDURES_DIRECTORY);
|
||||
|
||||
try (TransactionLegacy txn = TransactionLegacy.open("execute-procedure-scripts")) {
|
||||
Connection conn = txn.getConnection();
|
||||
|
||||
for (String filePath : filesPathUnderViewsDirectory) {
|
||||
LOGGER.debug(String.format("Executing PROCEDURE script [%s].", filePath));
|
||||
LOGGER.debug("Executing PROCEDURE script [{}].", filePath);
|
||||
|
||||
InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
|
||||
runScript(conn, viewScript);
|
||||
}
|
||||
|
||||
LOGGER.info(String.format("Finished execution of PROCEDURE scripts that are under resource directory [%s].", PROCEDURES_DIRECTORY));
|
||||
LOGGER.info("Finished execution of PROCEDURE scripts that are under resource directory [{}].", PROCEDURES_DIRECTORY);
|
||||
} catch (SQLException e) {
|
||||
String message = String.format("Unable to execute PROCEDURE scripts due to [%s].", e.getMessage());
|
||||
LOGGER.error(message, e);
|
||||
@ -335,7 +339,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
}
|
||||
|
||||
private DbUpgrade[] executeUpgrades(CloudStackVersion dbVersion, CloudStackVersion currentVersion) {
|
||||
LOGGER.info("Database upgrade must be performed from " + dbVersion + " to " + currentVersion);
|
||||
LOGGER.info("Database upgrade must be performed from {} to {}", dbVersion, currentVersion);
|
||||
|
||||
final DbUpgrade[] upgrades = calculateUpgradePath(dbVersion, currentVersion);
|
||||
|
||||
@ -348,8 +352,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
|
||||
private VersionVO executeUpgrade(DbUpgrade upgrade) {
|
||||
VersionVO version;
|
||||
LOGGER.debug("Running upgrade " + upgrade.getClass().getSimpleName() + " to upgrade from " + upgrade.getUpgradableVersionRange()[0] + "-" + upgrade
|
||||
.getUpgradableVersionRange()[1] + " to " + upgrade.getUpgradedVersion());
|
||||
LOGGER.debug("Running upgrade {} to upgrade from {}-{} to {}", upgrade.getClass().getSimpleName(), upgrade.getUpgradableVersionRange()[0], upgrade
|
||||
.getUpgradableVersionRange()[1], upgrade.getUpgradedVersion());
|
||||
TransactionLegacy txn = TransactionLegacy.open("Upgrade");
|
||||
txn.start();
|
||||
try {
|
||||
@ -392,8 +396,8 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
// Run the corresponding '-cleanup.sql' script
|
||||
txn = TransactionLegacy.open("Cleanup");
|
||||
try {
|
||||
LOGGER.info("Cleanup upgrade " + upgrade.getClass().getSimpleName() + " to upgrade from " + upgrade.getUpgradableVersionRange()[0] + "-" + upgrade
|
||||
.getUpgradableVersionRange()[1] + " to " + upgrade.getUpgradedVersion());
|
||||
LOGGER.info("Cleanup upgrade {} to upgrade from {}-{} to {}", upgrade.getClass().getSimpleName(), upgrade.getUpgradableVersionRange()[0], upgrade
|
||||
.getUpgradableVersionRange()[1], upgrade.getUpgradedVersion());
|
||||
|
||||
txn.start();
|
||||
Connection conn;
|
||||
@ -408,7 +412,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
if (scripts != null) {
|
||||
for (InputStream script : scripts) {
|
||||
runScript(conn, script);
|
||||
LOGGER.debug("Cleanup script " + upgrade.getClass().getSimpleName() + " is executed successfully");
|
||||
LOGGER.debug("Cleanup script {} is executed successfully", upgrade.getClass().getSimpleName());
|
||||
}
|
||||
}
|
||||
txn.commit();
|
||||
@ -418,27 +422,27 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
version.setUpdated(new Date());
|
||||
_dao.update(version.getId(), version);
|
||||
txn.commit();
|
||||
LOGGER.debug("Upgrade completed for version " + version.getVersion());
|
||||
LOGGER.debug("Upgrade completed for version {}", version.getVersion());
|
||||
} finally {
|
||||
txn.close();
|
||||
}
|
||||
}
|
||||
|
||||
protected void executeViewScripts() {
|
||||
LOGGER.info(String.format("Executing VIEW scripts that are under resource directory [%s].", VIEWS_DIRECTORY));
|
||||
LOGGER.info("Executing VIEW scripts that are under resource directory [{}].", VIEWS_DIRECTORY);
|
||||
List<String> filesPathUnderViewsDirectory = FileUtil.getFilesPathsUnderResourceDirectory(VIEWS_DIRECTORY);
|
||||
|
||||
try (TransactionLegacy txn = TransactionLegacy.open("execute-view-scripts")) {
|
||||
Connection conn = txn.getConnection();
|
||||
|
||||
for (String filePath : filesPathUnderViewsDirectory) {
|
||||
LOGGER.debug(String.format("Executing VIEW script [%s].", filePath));
|
||||
LOGGER.debug("Executing VIEW script [{}].", filePath);
|
||||
|
||||
InputStream viewScript = Thread.currentThread().getContextClassLoader().getResourceAsStream(filePath);
|
||||
runScript(conn, viewScript);
|
||||
}
|
||||
|
||||
LOGGER.info(String.format("Finished execution of VIEW scripts that are under resource directory [%s].", VIEWS_DIRECTORY));
|
||||
LOGGER.info("Finished execution of VIEW scripts that are under resource directory [{}].", VIEWS_DIRECTORY);
|
||||
} catch (SQLException e) {
|
||||
String message = String.format("Unable to execute VIEW scripts due to [%s].", e.getMessage());
|
||||
LOGGER.error(message, e);
|
||||
@ -468,10 +472,10 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
String csVersion = SystemVmTemplateRegistration.parseMetadataFile();
|
||||
final CloudStackVersion sysVmVersion = CloudStackVersion.parse(csVersion);
|
||||
final CloudStackVersion currentVersion = CloudStackVersion.parse(currentVersionValue);
|
||||
SystemVmTemplateRegistration.CS_MAJOR_VERSION = String.valueOf(sysVmVersion.getMajorRelease()) + "." + String.valueOf(sysVmVersion.getMinorRelease());
|
||||
SystemVmTemplateRegistration.CS_MAJOR_VERSION = sysVmVersion.getMajorRelease() + "." + sysVmVersion.getMinorRelease();
|
||||
SystemVmTemplateRegistration.CS_TINY_VERSION = String.valueOf(sysVmVersion.getPatchRelease());
|
||||
|
||||
LOGGER.info("DB version = " + dbVersion + " Code Version = " + currentVersion);
|
||||
LOGGER.info("DB version = {} Code Version = {}", dbVersion, currentVersion);
|
||||
|
||||
if (dbVersion.compareTo(currentVersion) > 0) {
|
||||
throw new CloudRuntimeException("Database version " + dbVersion + " is higher than management software version " + currentVersionValue);
|
||||
@ -520,7 +524,7 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
ResultSet result = pstmt.executeQuery()) {
|
||||
if (result.next()) {
|
||||
String init = result.getString(1);
|
||||
LOGGER.info("init = " + DBEncryptionUtil.decrypt(init));
|
||||
LOGGER.info("init = {}", DBEncryptionUtil.decrypt(init));
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -551,21 +555,11 @@ public class DatabaseUpgradeChecker implements SystemIntegrityChecker {
|
||||
return upgradedVersion;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean supportsRollingUpgrade() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream[] getPrepareScripts() {
|
||||
return new InputStream[0];
|
||||
}
|
||||
|
||||
@Override
|
||||
public void performDataMigration(Connection conn) {
|
||||
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream[] getCleanupScripts() {
|
||||
return new InputStream[0];
|
||||
|
||||
@ -77,8 +77,6 @@ public class Upgrade2214to30 extends Upgrade30xBase {
|
||||
encryptData(conn);
|
||||
// drop keys
|
||||
dropKeysIfExist(conn);
|
||||
//update template ID for system Vms
|
||||
//updateSystemVms(conn); This is not required as system template update is handled during 4.2 upgrade
|
||||
// update domain network ref
|
||||
updateDomainNetworkRef(conn);
|
||||
// update networks that use redundant routers to the new network offering
|
||||
|
||||
@ -62,7 +62,6 @@ public class Upgrade302to40 extends Upgrade30xBase {
|
||||
|
||||
@Override
|
||||
public void performDataMigration(Connection conn) {
|
||||
//updateVmWareSystemVms(conn); This is not required as system template update is handled during 4.2 upgrade
|
||||
correctVRProviders(conn);
|
||||
correctMultiplePhysicaNetworkSetups(conn);
|
||||
addHostDetailsUniqueKey(conn);
|
||||
|
||||
@ -65,7 +65,6 @@ public class Upgrade304to305 extends Upgrade30xBase {
|
||||
addVpcProvider(conn);
|
||||
updateRouterNetworkRef(conn);
|
||||
fixZoneUsingExternalDevices(conn);
|
||||
// updateSystemVms(conn);
|
||||
fixForeignKeys(conn);
|
||||
encryptClusterDetails(conn);
|
||||
}
|
||||
@ -81,54 +80,6 @@ public class Upgrade304to305 extends Upgrade30xBase {
|
||||
return new InputStream[] {script};
|
||||
}
|
||||
|
||||
private void updateSystemVms(Connection conn) {
|
||||
PreparedStatement pstmt = null;
|
||||
ResultSet rs = null;
|
||||
boolean VMware = false;
|
||||
try {
|
||||
pstmt = conn.prepareStatement("select distinct(hypervisor_type) from `cloud`.`cluster` where removed is null");
|
||||
rs = pstmt.executeQuery();
|
||||
while (rs.next()) {
|
||||
if ("VMware".equals(rs.getString(1))) {
|
||||
VMware = true;
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new CloudRuntimeException("Error while iterating through list of hypervisors in use", e);
|
||||
}
|
||||
// Just update the VMware system template. Other hypervisor templates are unchanged from previous 3.0.x versions.
|
||||
logger.debug("Updating VMware System Vms");
|
||||
try {
|
||||
//Get 3.0.5 VMware system Vm template Id
|
||||
pstmt = conn.prepareStatement("select id from `cloud`.`vm_template` where name = 'systemvm-vmware-3.0.5' and removed is null");
|
||||
rs = pstmt.executeQuery();
|
||||
if (rs.next()) {
|
||||
long templateId = rs.getLong(1);
|
||||
rs.close();
|
||||
pstmt.close();
|
||||
// change template type to SYSTEM
|
||||
pstmt = conn.prepareStatement("update `cloud`.`vm_template` set type='SYSTEM' where id = ?");
|
||||
pstmt.setLong(1, templateId);
|
||||
pstmt.executeUpdate();
|
||||
pstmt.close();
|
||||
// update template ID of system Vms
|
||||
pstmt = conn.prepareStatement("update `cloud`.`vm_instance` set vm_template_id = ? where type <> 'User' and hypervisor_type = 'VMware'");
|
||||
pstmt.setLong(1, templateId);
|
||||
pstmt.executeUpdate();
|
||||
pstmt.close();
|
||||
} else {
|
||||
if (VMware) {
|
||||
throw new CloudRuntimeException("3.0.5 VMware SystemVm template not found. Cannot upgrade system Vms");
|
||||
} else {
|
||||
logger.warn("3.0.5 VMware SystemVm template not found. VMware hypervisor is not used, so not failing upgrade");
|
||||
}
|
||||
}
|
||||
} catch (SQLException e) {
|
||||
throw new CloudRuntimeException("Error while updating VMware systemVm template", e);
|
||||
}
|
||||
logger.debug("Updating System Vm Template IDs Complete");
|
||||
}
|
||||
|
||||
private void addVpcProvider(Connection conn) {
|
||||
//Encrypt config params and change category to Hidden
|
||||
logger.debug("Adding vpc provider to all physical networks in the system");
|
||||
|
||||
@ -159,7 +159,7 @@ public class Upgrade41810to41900 extends DbUpgradeAbstractImpl implements DbUpgr
|
||||
try (PreparedStatement pstmt = conn.prepareStatement(createNewColumn)) {
|
||||
pstmt.execute();
|
||||
} catch (SQLException e) {
|
||||
String message = String.format("Unable to crate new backups' column date due to [%s].", e.getMessage());
|
||||
String message = String.format("Unable to create new backups' column date due to [%s].", e.getMessage());
|
||||
logger.error(message, e);
|
||||
throw new CloudRuntimeException(message, e);
|
||||
}
|
||||
|
||||
@ -0,0 +1,30 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade.dao;

public class Upgrade42200to42210 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {

    @Override
    public String[] getUpgradableVersionRange() {
        return new String[] {"4.22.0.0", "4.22.1.0"};
    }

    @Override
    public String getUpgradedVersion() {
        return "4.22.1.0";
    }
}
@ -0,0 +1,30 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.upgrade.dao;

public class Upgrade42210to42300 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {

    @Override
    public String[] getUpgradableVersionRange() {
        return new String[]{"4.22.1.0", "4.23.0.0"};
    }

    @Override
    public String getUpgradedVersion() {
        return "4.23.0.0";
    }
}
@ -59,6 +59,9 @@ public class UsageVolumeVO implements InternalIdentity {
    @Column(name = "size")
    private long size;

    @Column(name = "vm_id")
    private Long vmId;

    @Column(name = "created")
    @Temporal(value = TemporalType.TIMESTAMP)
    private Date created = null;
@ -70,13 +73,14 @@ public class UsageVolumeVO implements InternalIdentity {
    protected UsageVolumeVO() {
    }

    public UsageVolumeVO(long id, long zoneId, long accountId, long domainId, Long diskOfferingId, Long templateId, long size, Date created, Date deleted) {
    public UsageVolumeVO(long id, long zoneId, long accountId, long domainId, Long diskOfferingId, Long templateId, Long vmId, long size, Date created, Date deleted) {
        this.volumeId = id;
        this.zoneId = zoneId;
        this.accountId = accountId;
        this.domainId = domainId;
        this.diskOfferingId = diskOfferingId;
        this.templateId = templateId;
        this.vmId = vmId;
        this.size = size;
        this.created = created;
        this.deleted = deleted;
@ -126,4 +130,12 @@ public class UsageVolumeVO implements InternalIdentity {
    public long getVolumeId() {
        return volumeId;
    }

    public Long getVmId() {
        return vmId;
    }

    public void setVmId(Long vmId) {
        this.vmId = vmId;
    }
}

@ -57,6 +57,7 @@ public class UsageStorageDaoImpl extends GenericDaoBase<UsageStorageVO, Long> im
|
||||
IdSearch.and("accountId", IdSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
|
||||
IdSearch.and("id", IdSearch.entity().getEntityId(), SearchCriteria.Op.EQ);
|
||||
IdSearch.and("type", IdSearch.entity().getStorageType(), SearchCriteria.Op.EQ);
|
||||
IdSearch.and("deleted", IdSearch.entity().getDeleted(), SearchCriteria.Op.NULL);
|
||||
IdSearch.done();
|
||||
|
||||
IdZoneSearch = createSearchBuilder();
|
||||
@ -74,6 +75,7 @@ public class UsageStorageDaoImpl extends GenericDaoBase<UsageStorageVO, Long> im
|
||||
sc.setParameters("accountId", accountId);
|
||||
sc.setParameters("id", id);
|
||||
sc.setParameters("type", type);
|
||||
sc.setParameters("deleted", null);
|
||||
return listBy(sc, null);
|
||||
}
|
||||
|
||||
|
||||
@ -23,9 +23,7 @@ import com.cloud.usage.UsageVolumeVO;
|
||||
import com.cloud.utils.db.GenericDao;
|
||||
|
||||
public interface UsageVolumeDao extends GenericDao<UsageVolumeVO, Long> {
|
||||
public void removeBy(long userId, long id);
|
||||
|
||||
public void update(UsageVolumeVO usage);
|
||||
|
||||
public List<UsageVolumeVO> getUsageRecords(Long accountId, Long domainId, Date startDate, Date endDate, boolean limit, int page);
|
||||
|
||||
List<UsageVolumeVO> listByVolumeId(long volumeId, long accountId);
|
||||
}
|
||||
|
||||
@ -18,81 +18,46 @@ package com.cloud.usage.dao;
|
||||
|
||||
import java.sql.PreparedStatement;
|
||||
import java.sql.ResultSet;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.TimeZone;
|
||||
|
||||
|
||||
import com.cloud.exception.CloudException;
|
||||
import javax.annotation.PostConstruct;
|
||||
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import com.cloud.usage.UsageVolumeVO;
|
||||
import com.cloud.utils.DateUtil;
|
||||
import com.cloud.utils.db.GenericDaoBase;
|
||||
import com.cloud.utils.db.SearchBuilder;
|
||||
import com.cloud.utils.db.SearchCriteria;
|
||||
import com.cloud.utils.db.TransactionLegacy;
|
||||
|
||||
@Component
|
||||
public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> implements UsageVolumeDao {
|
||||
|
||||
protected static final String REMOVE_BY_USERID_VOLID = "DELETE FROM usage_volume WHERE account_id = ? AND volume_id = ?";
|
||||
protected static final String UPDATE_DELETED = "UPDATE usage_volume SET deleted = ? WHERE account_id = ? AND volume_id = ? and deleted IS NULL";
|
||||
protected static final String GET_USAGE_RECORDS_BY_ACCOUNT = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
|
||||
protected static final String GET_USAGE_RECORDS_BY_ACCOUNT = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
|
||||
+ "FROM usage_volume " + "WHERE account_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR "
|
||||
+ " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))";
|
||||
protected static final String GET_USAGE_RECORDS_BY_DOMAIN = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
|
||||
protected static final String GET_USAGE_RECORDS_BY_DOMAIN = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
|
||||
+ "FROM usage_volume " + "WHERE domain_id = ? AND ((deleted IS NULL) OR (created BETWEEN ? AND ?) OR "
|
||||
+ " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?)))";
|
||||
protected static final String GET_ALL_USAGE_RECORDS = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, size, created, deleted "
|
||||
protected static final String GET_ALL_USAGE_RECORDS = "SELECT volume_id, zone_id, account_id, domain_id, disk_offering_id, template_id, vm_id, size, created, deleted "
|
||||
+ "FROM usage_volume " + "WHERE (deleted IS NULL) OR (created BETWEEN ? AND ?) OR " + " (deleted BETWEEN ? AND ?) OR ((created <= ?) AND (deleted >= ?))";
|
||||
private SearchBuilder<UsageVolumeVO> volumeSearch;
|
||||
|
||||
public UsageVolumeDaoImpl() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void removeBy(long accountId, long volId) {
|
||||
TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
|
||||
try {
|
||||
txn.start();
|
||||
try(PreparedStatement pstmt = txn.prepareStatement(REMOVE_BY_USERID_VOLID);) {
|
||||
if (pstmt != null) {
|
||||
pstmt.setLong(1, accountId);
|
||||
pstmt.setLong(2, volId);
|
||||
pstmt.executeUpdate();
|
||||
}
|
||||
}catch (SQLException e) {
|
||||
throw new CloudException("Error removing usageVolumeVO:"+e.getMessage(), e);
|
||||
}
|
||||
txn.commit();
|
||||
} catch (Exception e) {
|
||||
txn.rollback();
|
||||
logger.warn("Error removing usageVolumeVO:"+e.getMessage(), e);
|
||||
} finally {
|
||||
txn.close();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void update(UsageVolumeVO usage) {
|
||||
TransactionLegacy txn = TransactionLegacy.open(TransactionLegacy.USAGE_DB);
|
||||
PreparedStatement pstmt = null;
|
||||
try {
|
||||
txn.start();
|
||||
if (usage.getDeleted() != null) {
|
||||
pstmt = txn.prepareAutoCloseStatement(UPDATE_DELETED);
|
||||
pstmt.setString(1, DateUtil.getDateDisplayString(TimeZone.getTimeZone("GMT"), usage.getDeleted()));
|
||||
pstmt.setLong(2, usage.getAccountId());
|
||||
pstmt.setLong(3, usage.getVolumeId());
|
||||
pstmt.executeUpdate();
|
||||
}
|
||||
txn.commit();
|
||||
} catch (Exception e) {
|
||||
txn.rollback();
|
||||
logger.warn("Error updating UsageVolumeVO", e);
|
||||
} finally {
|
||||
txn.close();
|
||||
}
|
||||
@PostConstruct
|
||||
protected void init() {
|
||||
volumeSearch = createSearchBuilder();
|
||||
volumeSearch.and("accountId", volumeSearch.entity().getAccountId(), SearchCriteria.Op.EQ);
|
||||
volumeSearch.and("volumeId", volumeSearch.entity().getVolumeId(), SearchCriteria.Op.EQ);
|
||||
volumeSearch.and("deleted", volumeSearch.entity().getDeleted(), SearchCriteria.Op.NULL);
|
||||
volumeSearch.done();
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -150,11 +115,15 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
|
||||
if (tId == 0) {
|
||||
tId = null;
|
||||
}
|
||||
long size = Long.valueOf(rs.getLong(7));
|
||||
Long vmId = Long.valueOf(rs.getLong(7));
|
||||
if (vmId == 0) {
|
||||
vmId = null;
|
||||
}
|
||||
long size = Long.valueOf(rs.getLong(8));
|
||||
Date createdDate = null;
|
||||
Date deletedDate = null;
|
||||
String createdTS = rs.getString(8);
|
||||
String deletedTS = rs.getString(9);
|
||||
String createdTS = rs.getString(9);
|
||||
String deletedTS = rs.getString(10);
|
||||
|
||||
if (createdTS != null) {
|
||||
createdDate = DateUtil.parseDateString(s_gmtTimeZone, createdTS);
|
||||
@ -163,7 +132,7 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
|
||||
deletedDate = DateUtil.parseDateString(s_gmtTimeZone, deletedTS);
|
||||
}
|
||||
|
||||
usageRecords.add(new UsageVolumeVO(vId, zoneId, acctId, dId, doId, tId, size, createdDate, deletedDate));
|
||||
usageRecords.add(new UsageVolumeVO(vId, zoneId, acctId, dId, doId, tId, vmId, size, createdDate, deletedDate));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
txn.rollback();
|
||||
@ -174,4 +143,13 @@ public class UsageVolumeDaoImpl extends GenericDaoBase<UsageVolumeVO, Long> impl
|
||||
|
||||
return usageRecords;
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<UsageVolumeVO> listByVolumeId(long volumeId, long accountId) {
|
||||
SearchCriteria<UsageVolumeVO> sc = volumeSearch.create();
|
||||
sc.setParameters("accountId", accountId);
|
||||
sc.setParameters("volumeId", volumeId);
|
||||
sc.setParameters("deleted", null);
|
||||
return listBy(sc);
|
||||
}
|
||||
}
|
||||
|
||||
@ -226,10 +226,6 @@ public class UserAccountVO implements UserAccount, InternalIdentity {
|
||||
return created;
|
||||
}
|
||||
|
||||
// public void setCreated(Date created) {
|
||||
// this.created = created;
|
||||
// }
|
||||
|
||||
@Override
|
||||
public Date getRemoved() {
|
||||
return removed;
|
||||
|
||||
@ -101,7 +101,7 @@ public class UserVmDaoImpl extends GenericDaoBase<UserVmVO, Long> implements Use
|
||||
ReservationDao reservationDao;
|
||||
|
||||
private static final String LIST_PODS_HAVING_VMS_FOR_ACCOUNT =
|
||||
"SELECT pod_id FROM cloud.vm_instance WHERE data_center_id = ? AND account_id = ? AND pod_id IS NOT NULL AND (state = 'Running' OR state = 'Stopped') "
|
||||
"SELECT pod_id FROM cloud.vm_instance WHERE data_center_id = ? AND account_id = ? AND pod_id IS NOT NULL AND state IN ('Starting', 'Running', 'Stopped') "
|
||||
+ "GROUP BY pod_id HAVING count(id) > 0 ORDER BY count(id) DESC";
|
||||
|
||||
private static final String VM_DETAILS = "select vm_instance.id, "
|
||||
@ -782,7 +782,7 @@ public class UserVmDaoImpl extends GenericDaoBase<UserVmVO, Long> implements Use
|
||||
result.add(new Ternary<Integer, Integer, Integer>(rs.getInt(1), rs.getInt(2), rs.getInt(3)));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.warn("Error counting vms by size for dcId= " + dcId, e);
|
||||
logger.warn("Error counting vms by size for Data Center ID = " + dcId, e);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
@ -886,7 +886,7 @@ public class VMInstanceDaoImpl extends GenericDaoBase<VMInstanceVO, Long> implem
|
||||
return rs.getLong(1);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.warn(String.format("Error counting vms by host tag for dcId= %s, hostTag= %s", dcId, hostTag), e);
|
||||
logger.warn(String.format("Error counting vms by host tag for dcId = %s, hostTag = %s", dcId, hostTag), e);
|
||||
}
|
||||
return 0L;
|
||||
}
|
||||
|
||||
@ -209,10 +209,8 @@ public class VolumeDataStoreVO implements StateObject<ObjectInDataStoreStateMach
|
||||
|
||||
public VolumeDataStoreVO(long hostId, long volumeId, Date lastUpdated, int downloadPercent, Status downloadState, String localDownloadPath, String errorString,
|
||||
String jobId, String installPath, String downloadUrl, String checksum) {
|
||||
// super();
|
||||
dataStoreId = hostId;
|
||||
this.volumeId = volumeId;
|
||||
// this.zoneId = zoneId;
|
||||
this.lastUpdated = lastUpdated;
|
||||
this.downloadPercent = downloadPercent;
|
||||
this.downloadState = downloadState;
|
||||
|
||||
@ -3,7 +3,7 @@
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliances
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
@ -0,0 +1,20 @@
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
--;
|
||||
-- Schema upgrade cleanup from 4.22.0.0 to 4.22.1.0
|
||||
--;
|
||||
@ -0,0 +1,27 @@
-- Licensed to the Apache Software Foundation (ASF) under one
-- or more contributor license agreements. See the NOTICE file
-- distributed with this work for additional information
-- regarding copyright ownership. The ASF licenses this file
-- to you under the Apache License, Version 2.0 (the
-- "License"); you may not use this file except in compliance
-- with the License. You may obtain a copy of the License at
--
-- http://www.apache.org/licenses/LICENSE-2.0
--
-- Unless required by applicable law or agreed to in writing,
-- software distributed under the License is distributed on an
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-- KIND, either express or implied. See the License for the
-- specific language governing permissions and limitations
-- under the License.

--;
-- Schema upgrade from 4.22.0.0 to 4.22.1.0
--;

-- Add vm_id column to usage_event table for volume usage events
CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.usage_event','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with volume usage events"');
CALL `cloud_usage`.`IDEMPOTENT_ADD_COLUMN`('cloud_usage.usage_event','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with volume usage events"');

-- Add vm_id column to cloud_usage.usage_volume table
CALL `cloud_usage`.`IDEMPOTENT_ADD_COLUMN`('cloud_usage.usage_volume','vm_id', 'bigint UNSIGNED NULL COMMENT "VM ID associated with the volume usage"');

@ -0,0 +1,20 @@
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
--;
|
||||
-- Schema upgrade cleanup from 4.22.1.0 to 4.23.0.0
|
||||
--;
|
||||
@ -0,0 +1,20 @@
|
||||
-- Licensed to the Apache Software Foundation (ASF) under one
|
||||
-- or more contributor license agreements. See the NOTICE file
|
||||
-- distributed with this work for additional information
|
||||
-- regarding copyright ownership. The ASF licenses this file
|
||||
-- to you under the Apache License, Version 2.0 (the
|
||||
-- "License"); you may not use this file except in compliance
|
||||
-- with the License. You may obtain a copy of the License at
|
||||
--
|
||||
-- http://www.apache.org/licenses/LICENSE-2.0
|
||||
--
|
||||
-- Unless required by applicable law or agreed to in writing,
|
||||
-- software distributed under the License is distributed on an
|
||||
-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
-- KIND, either express or implied. See the License for the
|
||||
-- specific language governing permissions and limitations
|
||||
-- under the License.
|
||||
|
||||
--;
|
||||
-- Schema upgrade from 4.22.1.0 to 4.23.0.0
|
||||
--;
|
||||
@ -22,7 +22,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
</parent>
|
||||
<artifactId>cloud-engine-service</artifactId>
|
||||
<packaging>war</packaging>
|
||||
|
||||
engine/storage/cache/pom.xml (vendored)
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -62,7 +62,6 @@ public class StorageCacheReplacementAlgorithmLRU implements StorageCacheReplacem
|
||||
/* Avoid using configDao at this time, we can't be sure that the database is already upgraded
|
||||
* and there might be fatal errors when using a dao.
|
||||
*/
|
||||
//unusedTimeInterval = NumbersUtil.parseInt(configDao.getValue(Config.StorageCacheReplacementLRUTimeInterval.key()), 30);
|
||||
}
|
||||
|
||||
public void setUnusedTimeInterval(Integer interval) {
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -87,8 +87,6 @@ import com.cloud.utils.component.ComponentContext;
|
||||
|
||||
@ContextConfiguration(locations = {"classpath:/storageContext.xml"})
|
||||
public class VolumeServiceTest extends CloudStackTestNGBase {
|
||||
// @Inject
|
||||
// ImageDataStoreProviderManager imageProviderMgr;
|
||||
@Inject
|
||||
TemplateService imageService;
|
||||
@Inject
|
||||
@ -232,23 +230,7 @@ public class VolumeServiceTest extends CloudStackTestNGBase {
|
||||
DataStore store = createImageStore();
|
||||
VMTemplateVO image = createImageData();
|
||||
TemplateInfo template = imageDataFactory.getTemplate(image.getId(), store);
|
||||
// AsyncCallFuture<TemplateApiResult> future =
|
||||
// imageService.createTemplateAsync(template, store);
|
||||
// future.get();
|
||||
template = imageDataFactory.getTemplate(image.getId(), store);
|
||||
/*
|
||||
* imageProviderMgr.configure("image Provider", new HashMap<String,
|
||||
* Object>()); VMTemplateVO image = createImageData();
|
||||
* ImageDataStoreProvider defaultProvider =
|
||||
* imageProviderMgr.getProvider("DefaultProvider");
|
||||
* ImageDataStoreLifeCycle lifeCycle =
|
||||
* defaultProvider.getLifeCycle(); ImageDataStore store =
|
||||
* lifeCycle.registerDataStore("defaultHttpStore", new
|
||||
* HashMap<String, String>());
|
||||
* imageService.registerTemplate(image.getId(),
|
||||
* store.getImageDataStoreId()); TemplateEntity te =
|
||||
* imageService.getTemplateEntity(image.getId()); return te;
|
||||
*/
|
||||
return template;
|
||||
} catch (Exception e) {
|
||||
Assert.fail("failed", e);
|
||||
@ -333,30 +315,6 @@ public class VolumeServiceTest extends CloudStackTestNGBase {
|
||||
ClusterScope scope = new ClusterScope(clusterId, podId, dcId);
|
||||
lifeCycle.attachCluster(store, scope);
|
||||
|
||||
/*
|
||||
* PrimaryDataStoreProvider provider =
|
||||
* primaryDataStoreProviderMgr.getDataStoreProvider
|
||||
* ("sample primary data store provider");
|
||||
* primaryDataStoreProviderMgr.configure("primary data store mgr",
|
||||
* new HashMap<String, Object>());
|
||||
*
|
||||
* List<PrimaryDataStoreVO> ds =
|
||||
* primaryStoreDao.findPoolByName(this.primaryName); if (ds.size()
|
||||
* >= 1) { PrimaryDataStoreVO store = ds.get(0); if
|
||||
* (store.getRemoved() == null) { return
|
||||
* provider.getDataStore(store.getId()); } }
|
||||
*
|
||||
*
|
||||
* Map<String, String> params = new HashMap<String, String>();
|
||||
* params.put("url", this.getPrimaryStorageUrl());
|
||||
* params.put("dcId", dcId.toString()); params.put("clusterId",
|
||||
* clusterId.toString()); params.put("name", this.primaryName);
|
||||
* PrimaryDataStoreInfo primaryDataStoreInfo =
|
||||
* provider.registerDataStore(params); PrimaryDataStoreLifeCycle lc
|
||||
* = primaryDataStoreInfo.getLifeCycle(); ClusterScope scope = new
|
||||
* ClusterScope(clusterId, podId, dcId); lc.attachCluster(scope);
|
||||
* return primaryDataStoreInfo;
|
||||
*/
|
||||
return store;
|
||||
} catch (Exception e) {
|
||||
return null;
|
||||
@ -376,7 +334,6 @@ public class VolumeServiceTest extends CloudStackTestNGBase {
|
||||
TemplateInfo te = createTemplate();
|
||||
VolumeVO volume = createVolume(te.getId(), primaryStore.getId());
|
||||
VolumeInfo vol = volumeFactory.getVolume(volume.getId(), primaryStore);
|
||||
// ve.createVolumeFromTemplate(primaryStore.getId(), new VHD(), te);
|
||||
AsyncCallFuture<VolumeApiResult> future = volumeService.createVolumeFromTemplateAsync(vol, primaryStore.getId(), te);
|
||||
try {
|
||||
future.get();
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
@ -24,7 +24,7 @@
|
||||
<parent>
|
||||
<groupId>org.apache.cloudstack</groupId>
|
||||
<artifactId>cloud-engine</artifactId>
|
||||
<version>4.22.0.0</version>
|
||||
<version>4.23.0.0-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
<dependencies>
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.