Merged master over to javelin to get new POMs and Maven build

Alex Huang 2012-09-05 14:31:24 -07:00
commit 1d0a10c69e
505 changed files with 13475 additions and 11116 deletions

LICENSE
View File

@ -275,6 +275,40 @@ Within the console-proxy/js directory
jquery.js
Within the deps directory
licensed under the BSD (2-clause) for XenServerJava http://www.opensource.org/licenses/BSD-2-Clause (as follows)
Copyright (c) Citrix Systems, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1) Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2) Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from Citrix Systems, Inc http://www.citrix.com/
XenServerJava http://community.citrix.com/cdn/xs/sdks/
Within the deps/awsapi-lib directory
licensed under the ANTLR 2 License http://www.antlr2.org/license.html (as follows)

View File

@ -41,4 +41,18 @@ http://cloudstack.org/download.html
[Users Mailing list](mailto:cloudstack-users-subscribe@incubator.apache.org)
[Commits mailing list](mailto:cloudstack-commits-subscribe@incubator.apache.org)
# Maven build
Some third-party jars are not available in Maven Central.
Install them with: cd deps && sh ./install-non-oss.sh
You can then enable the nonoss build by adding -Dnonoss to the Maven command line.
To run the webapp client:
mvn org.apache.tomcat.maven:tomcat7-maven-plugin:2.0-beta-1:run -pl :cloud-client-ui -am -Pclient -Dnonoss
then browse to: http://localhost:8080/cloud-client-ui/
Alternatively, add the following to your ~/.m2/settings.xml:
<pluginGroups>
<pluginGroup>org.apache.tomcat.maven</pluginGroup>
</pluginGroups>
and save your fingers with: mvn tomcat7:run -pl :cloud-client-ui -am -Pclient -Dnonoss
To debug from an IDE, replace mvn with mvnDebug and attach your IDE debugger to port 8000.
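For reference, a minimal sketch of a ~/.m2/settings.xml that carries only this plugin-group mapping is shown below; the <settings> wrapper and namespace are the standard Maven settings schema, and the pluginGroups content is exactly the snippet above.
<?xml version="1.0" encoding="UTF-8"?>
<settings xmlns="http://maven.apache.org/SETTINGS/1.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/SETTINGS/1.0.0 http://maven.apache.org/xsd/settings-1.0.0.xsd">
  <!-- lets Maven resolve the short tomcat7: plugin prefix -->
  <pluginGroups>
    <pluginGroup>org.apache.tomcat.maven</pluginGroup>
  </pluginGroups>
</settings>
With that in place, mvn tomcat7:run -pl :cloud-client-ui -am -Pclient -Dnonoss works as written above, and mvnDebug tomcat7:run -pl :cloud-client-ui -am -Pclient -Dnonoss starts the same build while waiting for a remote debugger on port 8000 (the mvnDebug default).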

View File

@ -17,52 +17,50 @@
under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-agent</artifactId>
<name>Apache CloudStack Agents</name>
<parent>
<groupId>com.cloud</groupId>
<artifactId>cloud-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../parent/pom.xml</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>com.cloud</groupId>
<artifactId>cloud-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<version>6.1.26</version>
</dependency>
<dependency>
<groupId>com.cloud</groupId>
<artifactId>cloud-utils</artifactId>
<version>${project.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
<testSourceDirectory>test</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-agent</artifactId>
<name>Apache CloudStack Agents</name>
<parent>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloudstack</artifactId>
<version>4.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloud-core</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.mortbay.jetty</groupId>
<artifactId>jetty</artifactId>
<version>6.1.26</version>
</dependency>
<dependency>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloud-utils</artifactId>
<version>${project.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
<testSourceDirectory>test</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -17,30 +17,29 @@
under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-api</artifactId>
<name>Apache CloudStack API</name>
<parent>
<groupId>com.cloud</groupId>
<artifactId>cloud-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../parent/pom.xml</relativePath>
</parent>
<dependencies>
<dependency>
<groupId>com.cloud</groupId>
<artifactId>cloud-utils</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${cs.gson.version}</version>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
</build>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-api</artifactId>
<name>Apache CloudStack API</name>
<parent>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloudstack</artifactId>
<version>4.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloud-utils</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${cs.gson.version}</version>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
</build>
</project>

View File

@ -16,15 +16,17 @@
// under the License.
package com.cloud.api.response;
import com.cloud.utils.IdentityProxy;
import com.cloud.serializer.Param;
import com.google.gson.annotations.SerializedName;
import java.util.ArrayList;
import com.cloud.serializer.Param;
import com.cloud.utils.IdentityProxy;
import com.google.gson.annotations.SerializedName;
public class ExceptionResponse extends BaseResponse {
@SerializedName("uuidList") @Param(description="List of uuids associated with this error")
private ArrayList<IdentityProxy> idList = new ArrayList<IdentityProxy>();
@SerializedName("uuidList") @Param(description="List of uuids associated with this error")
private ArrayList<IdentityProxy> idList = new ArrayList<IdentityProxy>();
@SerializedName("errorcode") @Param(description="numeric code associated with this error")
private Integer errorCode;
@ -49,17 +51,22 @@ public class ExceptionResponse extends BaseResponse {
public void setErrorText(String errorText) {
this.errorText = errorText;
}
public void addProxyObject(String tableName, Long id, String idFieldName) {
idList.add(new IdentityProxy(tableName, id, idFieldName));
return;
}
public ArrayList<IdentityProxy> getIdProxyList() {
return idList;
}
public void setCSErrorCode(int cserrcode) {
this.csErrorCode = cserrcode;
}
public void addProxyObject(String tableName, Long id, String idFieldName) {
idList.add(new IdentityProxy(tableName, id, idFieldName));
return;
}
public ArrayList<IdentityProxy> getIdProxyList() {
return idList;
}
public void setCSErrorCode(int cserrcode) {
this.csErrorCode = cserrcode;
}
@Override
public String toString() {
return ("Error Code: " + errorCode + " Error text: " + errorText);
}
}

View File

@ -158,6 +158,9 @@ public class UserVmResponse extends BaseResponse implements ControlledEntityResp
@SerializedName(ApiConstants.TAGS) @Param(description="the list of resource tags associated with vm", responseObject = ResourceTagResponse.class)
private List<ResourceTagResponse> tags;
@SerializedName(ApiConstants.SSH_KEYPAIR) @Param(description="ssh key-pair")
private String keyPairName;
public void setHypervisor(String hypervisor) {
this.hypervisor = hypervisor;
}
@ -348,4 +351,8 @@ public class UserVmResponse extends BaseResponse implements ControlledEntityResp
public void setTags(List<ResourceTagResponse> tags) {
this.tags = tags;
}
public void setKeyPairName(String keyPairName) {
this.keyPairName = keyPairName;
}
}

View File

@ -17,6 +17,8 @@
package com.cloud.storage;
public interface GuestOsCategory {
// Used by OS preference, 'None' for no OS preference
public static final String CATEGORY_NONE ="None";
long getId();
String getName();

View File

@ -1,4 +1,4 @@
#!/cygdrive/c/python26/python
#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file

View File

@ -1,20 +1,68 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" output="target/classes" path="src">
<attributes>
<attribute name="optional" value="true"/>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="con" path="org.eclipse.m2e.MAVEN2_CLASSPATH_CONTAINER">
<attributes>
<attribute name="maven.pomderived" value="true"/>
</attributes>
</classpathentry>
<classpathentry kind="output" path="target/classes"/>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry combineaccessrules="false" kind="src" path="/deps"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/antlr-2.7.6.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/apache-log4j-extras-1.0.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axiom-api-1.2.8.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axiom-impl-1.2.8.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-adb-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-ant-plugin-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-jaxbri-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-jaxws-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-jibx-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-json-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-kernel-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-transport-http-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/axis2-transport-local-1.5.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/cloud-gson.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-codec-1.4.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-collections-3.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-fileupload-1.2.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-httpclient-3.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-io-1.4.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/commons-logging-1.1.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/dom4j-1.6.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/httpcore-4.0.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/javassist-3.9.0.GA.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/jaxb-api-2.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/jaxb-impl-2.1.7.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/jaxb-xjc-2.1.7.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/jsch-0.1.42.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/json_simple-1.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/jta-1.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/junit-4.8.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/log4j-1.2.15.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/mail-1.4.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/mysql-connector-java-5.1.7-bin.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/neethi-2.0.4.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/slf4j-api-1.5.11.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/slf4j-jdk14-1.5.11.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/woden-api-1.0M8.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/woden-impl-dom-1.0M8.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/xercesImpl.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/xml-apis.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/XmlSchema-1.4.3.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/bcprov-jdk16-145.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/commons-collections-3.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/joda-time-1.5.2.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/opensaml-1.1.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/opensaml-2.2.3.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/openws-1.2.2.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/rampart-core-1.5.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/rampart-policy-1.5.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/rampart-trust-1.5.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/slf4j-api-1.5.11.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/slf4j-jdk14-1.5.11.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/velocity-1.5.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/wss4j-1.5.8.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/xmlsec-1.4.2.jar"/>
<classpathentry kind="lib" path="/deps/awsapi-lib/rampart-lib/xmltooling-1.2.0.jar"/>
<classpathentry kind="lib" path="/deps/cloud-servlet-api.jar"/>
<classpathentry kind="lib" path="/deps/cloud-javax.persistence-2.0.0.jar"/>
<classpathentry combineaccessrules="false" kind="src" path="/utils"/>
<classpathentry kind="lib" path="/deps/cloud-ehcache.jar"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@ -1,51 +0,0 @@
<?xml version='1.0' encoding='utf-8'?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-configuration PUBLIC "-//Hibernate/Hibernate Configuration DTD//EN" "http://hibernate.sourceforge.net/hibernate-configuration-3.0.dtd">
<hibernate-configuration>
<session-factory>
<property name="hibernate.connection.driver_class">com.mysql.jdbc.Driver</property>
<property name="hibernate.connection.pool_size">20</property>
<property name="hibernate.connection.autocommit">false</property>
<!-- transactiion isolation level : 1 - read uncommitted, 2 - read committed, 4 - repeatable read, 8 - Serializable -->
<property name="hibernate.connection.isolation">2</property>
<property name="hibernate.order_updates">true</property>
<property name="dialect">org.hibernate.dialect.MySQLDialect</property>
<property name="show_sql">false</property>
<!-- to debug hibernate generated SQL, open following configuration property -->
<!--
<property name="show_sql">true</property>
<property name="hibernate.format_sql">true</property>
-->
<!-- Mapping files -->
<mapping resource="com/cloud/bridge/model/UserCredentials.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SHost.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/MHost.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/MHostMount.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SBucket.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SObject.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SObjectItem.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SMeta.hbm.xml"/>
<mapping resource="com/cloud/bridge/model/SAcl.hbm.xml"/>
</session-factory>
</hibernate-configuration>

View File

@ -17,99 +17,111 @@
under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-awsapi</artifactId>
<version>4.0.0-SNAPSHOT</version>
<name>Apache CloudStack AWS API Bridge</name>
<parent>
<groupId>com.cloud</groupId>
<artifactId>cloud-parent</artifactId>
<version>4.0.0-SNAPSHOT</version>
<relativePath>../parent/pom.xml</relativePath>
</parent>
<dependencies>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<artifactId>cloud-awsapi</artifactId>
<version>4.0.0-SNAPSHOT</version>
<name>Apache CloudStack AWS API Bridge</name>
<parent>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloudstack</artifactId>
<version>4.0.0-SNAPSHOT</version>
</parent>
<dependencies>
<dependency>
<groupId>org.apache.cloudstack</groupId>
<artifactId>cloud-utils</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.axis2</groupId>
<artifactId>axis2</artifactId>
<version>${cs.axis2.version}</version>
</dependency>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId>
<version>${cs.axiom.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-annotations</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${cs.log4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.neethi</groupId>
<artifactId>neethi</artifactId>
<version>${cs.neethi.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${cs.gson.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${cs.codec.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${cs.servlet.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.jasypt</groupId>
<artifactId>jasypt</artifactId>
<version>${cs.jasypt.version}</version>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
<!--
<testSourceDirectory>test</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
-->
</build>
<dependency>
<groupId>org.apache.ws.commons.axiom</groupId>
<artifactId>axiom-api</artifactId>
<version>${cs.axiom.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-core</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-annotations</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-entitymanager</artifactId>
<version>${cs.hibernate.version}</version>
</dependency>
<dependency>
<groupId>log4j</groupId>
<artifactId>log4j</artifactId>
<version>${cs.log4j.version}</version>
</dependency>
<dependency>
<groupId>org.apache.neethi</groupId>
<artifactId>neethi</artifactId>
<version>${cs.neethi.version}</version>
</dependency>
<dependency>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
<version>${cs.gson.version}</version>
</dependency>
<dependency>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
<version>${cs.codec.version}</version>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>
<artifactId>servlet-api</artifactId>
<version>${cs.servlet.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
</dependency>
<dependency>
<groupId>org.jasypt</groupId>
<artifactId>jasypt</artifactId>
<version>${cs.jasypt.version}</version>
</dependency>
</dependencies>
<build>
<defaultGoal>install</defaultGoal>
<sourceDirectory>src</sourceDirectory>
<resources>
<resource>
<directory>src</directory>
<excludes>
<exclude>**/*.java</exclude>
</excludes>
</resource>
</resources>
<!--
<testSourceDirectory>test</testSourceDirectory>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>2.2</version>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
-->
</build>
</project>

View File

@ -37,15 +37,16 @@ import java.io.InputStream;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import com.cloud.bridge.model.UserCredentials;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.bridge.service.UserContext;
import com.cloud.bridge.util.AuthenticationUtils;
import com.cloud.utils.component.ComponentLocator;
public class AuthenticationHandler implements Handler {
protected final static Logger logger = Logger.getLogger(AuthenticationHandler.class);
protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);
private DocumentBuilderFactory dbf = null;
protected HandlerDescription handlerDesc = new HandlerDescription( "EC2AuthenticationHandler" );
@ -111,13 +112,15 @@ public class AuthenticationHandler implements Handler {
logger.debug( "X509 cert's uniqueId: " + uniqueId );
// -> find the Cloud API key and the secret key from the cert's uniqueId
UserCredentialsDao credentialDao = new UserCredentialsDao();
/* UserCredentialsDao credentialDao = new UserCredentialsDao();
UserCredentials cloudKeys = credentialDao.getByCertUniqueId( uniqueId );
if ( null == cloudKeys ) {
logger.error( "Cert does not map to Cloud API keys: " + uniqueId );
throw new AxisFault( "User not properly registered: Certificate does not map to Cloud API Keys", "Client.Blocked" );
}
else UserContext.current().initContext( cloudKeys.getAccessKey(), cloudKeys.getSecretKey(), cloudKeys.getAccessKey(), "SOAP Request", null );
*/
UserCredentialsVO cloudKeys = ucDao.getByCertUniqueId(uniqueId);
if ( null == cloudKeys ) {
logger.error( "Cert does not map to Cloud API keys: " + uniqueId );
throw new AxisFault( "User not properly registered: Certificate does not map to Cloud API Keys", "Client.Blocked" );
}
else UserContext.current().initContext( cloudKeys.getAccessKey(), cloudKeys.getSecretKey(), cloudKeys.getAccessKey(), "SOAP Request", null );
//System.out.println( "end of cert match: " + UserContext.current().getSecretKey());
}
}

View File

@ -29,10 +29,11 @@ import org.apache.axis2.AxisFault;
import org.apache.axis2.description.HandlerDescription;
import org.apache.axis2.description.Parameter;
import com.cloud.bridge.model.UserCredentials;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.bridge.service.UserContext;
import com.cloud.bridge.util.S3SoapAuth;
import com.cloud.utils.component.ComponentLocator;
/*
* For SOAP compatibility.
@ -40,7 +41,7 @@ import com.cloud.bridge.util.S3SoapAuth;
public class AuthenticationHandler implements Handler {
protected final static Logger logger = Logger.getLogger(AuthenticationHandler.class);
protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);
protected HandlerDescription handlerDesc = new HandlerDescription( "default handler" );
private String name = "S3AuthenticationHandler";
@ -190,8 +191,7 @@ public class AuthenticationHandler implements Handler {
private String lookupSecretKey( String accessKey )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
UserCredentialsDao credentialDao = new UserCredentialsDao();
UserCredentials cloudKeys = credentialDao.getByAccessKey( accessKey );
UserCredentialsVO cloudKeys = ucDao.getByAccessKey( accessKey );
if ( null == cloudKeys ) {
logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" );
return null;

View File

@ -21,8 +21,8 @@ import org.apache.axis2.description.AxisService;
import org.apache.axis2.engine.ServiceLifeCycle;
import org.apache.log4j.Logger;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.service.controller.s3.ServiceProvider;
import com.cloud.utils.db.Transaction;
/**
@ -38,7 +38,7 @@ public class ServiceEngineLifecycle implements ServiceLifeCycle {
public void startUp(ConfigurationContext config, AxisService service) {
// initialize service provider during Axis engine startup
try{
UserCredentialsDao.preCheckTableExistence();
//UserCredentialsDao.preCheckTableExistence();
ServiceProvider.getInstance();
ServiceEngineLifecycle.initialized = true;
}catch(Exception e){

View File

@ -0,0 +1,59 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="bucket_policies")
public class BucketPolicyVO {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private long id;
@Column(name="BucketName")
private String bucketName;
@Column(name="OwnerCanonicalID")
private String ownerCanonicalID;
@Column(name="Policy")
private String policy;
public BucketPolicyVO() { }
public BucketPolicyVO(String bucketName, String client, String policy) {
this.bucketName = bucketName;
this.ownerCanonicalID = client;
this.policy = policy;
}
public long getId() {
return id;
}
public void setId(long id) {
this.id = id;
}
public String getBucketName() {
return bucketName;
}
public void setBucketName(String bucketName) {
this.bucketName = bucketName;
}
public String getOwnerCanonicalID() {
return ownerCanonicalID;
}
public void setOwnerCanonicalID(String ownerCanonicalID) {
this.ownerCanonicalID = ownerCanonicalID;
}
public String getPolicy() {
return policy;
}
public void setPolicy(String policy) {
this.policy = policy;
}
}

View File

@ -0,0 +1,34 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Table;
@Entity
@Table(name="account")
public class CloudStackAccountVO {
@Column(name="uuid")
private String uuid;
@Column(name="default_zone_id")
private Long defaultZoneId = null;
public String getUuid() {
return uuid;
}
public void setUuid(String uuid) {
this.uuid = uuid;
}
public Long getDefaultZoneId() {
return defaultZoneId;
}
public void setDefaultZoneId(Long defaultZoneId) {
this.defaultZoneId = defaultZoneId;
}
}

View File

@ -0,0 +1,30 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.cloud.utils.db.DB;
@Entity
@Table(name="configuration")
public class CloudStackConfigurationVO {
@Id
@Column(name="name")
private String name;
@Column(name="value", length=4095)
private String value;
@DB
public String getValue() {
return value;
}
public String getName() {
return name;
}
}

View File

@ -0,0 +1,51 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="disk_offering")
public class CloudStackServiceOfferingVO {
@Id
@Column(name="id")
private String id;
@Column(name="name")
private String name;
@Column(name="domain_id")
private String domainId;
public String getId() {
return id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDomainId() {
return domainId;
}
public void setDomainId(String domainId) {
this.domainId = domainId;
}
}

View File

@ -1,55 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.MHost" table="mhost" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="host">
<column name="Host" />
</property>
<property name="hostKey">
<column name="MHostKey" />
</property>
<property name="version">
<column name="Version" />
</property>
<property name="lastHeartbeatTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="LastHeartbeatTime" />
</property>
<set name="localSHosts" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="MHostID" />
<one-to-many class="com.cloud.bridge.model.SHost" />
</set>
<set name="mounts" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="MHostID" />
<one-to-many class="com.cloud.bridge.model.MHostMount" />
</set>
</class>
</hibernate-mapping>

View File

@ -1,46 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.MHostMount" table="mhost_mount" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="mountPath">
<column name="MountPath" />
</property>
<property name="lastMountTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="LastMountTime" />
</property>
<many-to-one name="mhost" column="MHostID"
class="com.cloud.bridge.model.MHost"
cascade="none"
/>
<many-to-one name="shost" column="SHostID"
class="com.cloud.bridge.model.SHost"
cascade="none"
/>
</class>
</hibernate-mapping>

View File

@ -19,18 +19,46 @@ package com.cloud.bridge.model;
import java.io.Serializable;
import java.util.Date;
public class MHostMount implements Serializable {
private static final long serialVersionUID = -1119494563131099642L;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
@Entity
@Table(name="mhost_mount")
public class MHostMountVO implements Serializable {
private static final long serialVersionUID = -1119494563131099642L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
private MHost mhost;
private SHost shost;
@Column(name="MHostID")
private long mHostID;
@Column(name="SHostID")
private long sHostID;
@Transient
private MHostVO mhost;
@Transient
private SHostVO shost;
@Column(name="MountPath")
private String mountPath;
@Column(name="LastMountTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date lastMountTime;
public MHostMount() {
public MHostMountVO() {
}
public Long getId() {
@ -41,19 +69,35 @@ public class MHostMount implements Serializable {
this.id = id;
}
public MHost getMhost() {
public long getmHostID() {
return mHostID;
}
public void setmHostID(long mHostID) {
this.mHostID = mHostID;
}
public long getsHostID() {
return sHostID;
}
public void setsHostID(long sHostID) {
this.sHostID = sHostID;
}
public MHostVO getMhost() {
return mhost;
}
public void setMhost(MHost mhost) {
public void setMhost(MHostVO mhost) {
this.mhost = mhost;
}
public SHost getShost() {
public SHostVO getShost() {
return shost;
}
public void setShost(SHost shost) {
public void setShost(SHostVO shost) {
this.shost = shost;
}
@ -78,11 +122,11 @@ public class MHostMount implements Serializable {
if(this == other)
return true;
if(!(other instanceof MHostMount))
if(!(other instanceof MHostMountVO))
return false;
return getMhost().equals(((MHostMount)other).getMhost()) &&
getShost().equals(((MHostMount)other).getShost());
return getMhost().equals(((MHostMountVO)other).getMhost()) &&
getShost().equals(((MHostMountVO)other).getShost());
}
@Override

View File

@ -21,20 +21,46 @@ import java.util.Date;
import java.util.HashSet;
import java.util.Set;
public class MHost implements Serializable {
private static final long serialVersionUID = 4848254624679753930L;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
@Entity
@Table(name="mhost")
public class MHostVO implements Serializable {
private static final long serialVersionUID = 4848254624679753930L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="MHostKey", nullable=false)
private String hostKey;
@Column(name="Host")
private String host;
@Column(name="Version")
private String version;
@Column(name="LastHeartbeatTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date lastHeartbeatTime;
@Transient
private Set<SHostVO> localSHosts = new HashSet<SHostVO>();
@Transient
private Set<MHostMountVO> mounts = new HashSet<MHostMountVO>();
private Set<SHost> localSHosts = new HashSet<SHost>();
private Set<MHostMount> mounts = new HashSet<MHostMount>();
public MHost() {
public MHostVO() {
}
public Long getId() {
@ -77,19 +103,19 @@ public class MHost implements Serializable {
this.lastHeartbeatTime = lastHeartbeatTime;
}
public Set<SHost> getLocalSHosts() {
public Set<SHostVO> getLocalSHosts() {
return localSHosts;
}
public void setLocalSHosts(Set<SHost> localSHosts) {
public void setLocalSHosts(Set<SHostVO> localSHosts) {
this.localSHosts = localSHosts;
}
public Set<MHostMount> getMounts() {
public Set<MHostMountVO> getMounts() {
return mounts;
}
public void setMounts(Set<MHostMount> mounts) {
public void setMounts(Set<MHostMountVO> mounts) {
this.mounts = mounts;
}
@ -98,10 +124,10 @@ public class MHost implements Serializable {
if(this == other)
return true;
if(!(other instanceof MHost))
if(!(other instanceof MHostVO))
return false;
return hostKey == ((MHost)other).getHostKey();
return hostKey == ((MHostVO)other).getHostKey();
}
@Override

View File

@ -0,0 +1,108 @@
package com.cloud.bridge.model;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
@Entity
@Table(name="multipart_parts")
public class MultiPartPartsVO {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="UploadID")
private Long uploadid;
@Column(name="partNumber")
private int partNumber;
@Column(name="MD5")
private String md5;
@Column(name="StoredPath")
private String storedPath;
@Column(name="StoredSize")
private Long storedSize;
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
public MultiPartPartsVO() { }
public MultiPartPartsVO(int uploadId, int partNumber, String md5,
String storedPath, int size, Date date) {
this.uploadid = new Long(uploadId);
this.partNumber = partNumber;
this.md5 = md5;
this.storedPath = storedPath;
this.storedSize = new Long(size);
this.createTime = date;
}
public Long getUploadid() {
return uploadid;
}
public void setUploadid(Long uploadid) {
this.uploadid = uploadid;
}
public int getPartNumber() {
return partNumber;
}
public void setPartNumber(int partNumber) {
this.partNumber = partNumber;
}
public String getMd5() {
return md5;
}
public void setMd5(String md5) {
this.md5 = md5;
}
public String getStoredPath() {
return storedPath;
}
public void setStoredPath(String storedPath) {
this.storedPath = storedPath;
}
public Long getStoredSize() {
return storedSize;
}
public void setStoredSize(Long storedSize) {
this.storedSize = storedSize;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Long getId() {
return id;
}
}

View File

@ -0,0 +1,94 @@
package com.cloud.bridge.model;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
@Entity
@Table(name="multipart_uploads")
public class MultiPartUploadsVO {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="AccessKey")
private String accessKey;
@Column(name="BucketName")
private String bucketName;
@Column(name="NameKey")
private String nameKey;
@Column(name="x_amz_acl")
private String amzAcl;
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
public MultiPartUploadsVO() {}
public MultiPartUploadsVO(String accessKey, String bucketName, String key, String cannedAccess, Date tod) {
this.accessKey = accessKey;
this.bucketName = bucketName;
this.nameKey = key;
this.amzAcl = cannedAccess;
this.createTime = tod;
}
public Long getId() {
return id;
}
public String getAccessKey() {
return accessKey;
}
public void setAccessKey(String accessKey) {
this.accessKey = accessKey;
}
public String getBucketName() {
return bucketName;
}
public void setBucketName(String bucketName) {
this.bucketName = bucketName;
}
public String getNameKey() {
return nameKey;
}
public void setNameKey(String nameKey) {
this.nameKey = nameKey;
}
public String getAmzAcl() {
return amzAcl;
}
public void setAmzAcl(String amzAcl) {
this.amzAcl = amzAcl;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
}

View File

@ -0,0 +1,59 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="multipart_meta")
public class MultipartMetaVO {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="UploadID")
private long uploadID;
@Column(name="Name")
private String name;
@Column(name="Value")
private String value;
public long getID() {
return id;
}
public long getUploadID() {
return uploadID;
}
public void setUploadID(long uploadID) {
this.uploadID = uploadID;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
}

View File

@ -0,0 +1,46 @@
package com.cloud.bridge.model;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="offering_bundle")
public class OfferingBundleVO {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private long id;
@Column(name="AmazonEC2Offering")
private String amazonOffering;
@Column(name="CloudStackOffering")
private String cloudstackOffering;
public long getID() {
return id;
}
public String getAmazonOffering() {
return amazonOffering;
}
public void setAmazonOffering(String amazonOffering) {
this.amazonOffering = amazonOffering;
}
public String getCloudstackOffering() {
return cloudstackOffering;
}
public void setCloudstackOffering(String cloudstackOffering) {
this.cloudstackOffering = cloudstackOffering;
}
}

View File

@ -1,54 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SAcl" table="acl" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="target">
<column name="Target" />
</property>
<property name="targetId">
<column name="TargetID" />
</property>
<property name="granteeType">
<column name="GranteeType" />
</property>
<property name="granteeCanonicalId">
<column name="GranteeCanonicalID" />
</property>
<property name="permission">
<column name="Permission" />
</property>
<property name="grantOrder">
<column name="GrantOrder" />
</property>
<property name="createTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="CreateTime" />
</property>
<property name="lastModifiedTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="LastModifiedTime" />
</property>
</class>
</hibernate-mapping>

View File

@ -37,8 +37,7 @@ import com.cloud.bridge.util.Triple;
* < permission1, permission2, symbol >
* when given an aclRequestString, a target (i.e. bucket or object) and the ID of the owner.
*/
public class SAcl implements Serializable {
private static final long serialVersionUID = 7900837117165018850L;
public interface SAcl {
public static final int GRANTEE_USER = 0;
public static final int GRANTEE_ALLUSERS = 1;
@ -52,95 +51,6 @@ public class SAcl implements Serializable {
public static final int PERMISSION_WRITE_ACL = 8;
public static final int PERMISSION_FULL = (PERMISSION_READ | PERMISSION_WRITE | PERMISSION_READ_ACL | PERMISSION_WRITE_ACL);
private Long id;
private String target;
private long targetId;
private int granteeType;
private String granteeCanonicalId;
private int permission;
private int grantOrder;
private Date createTime;
private Date lastModifiedTime;
public SAcl() {
}
public Long getId() {
return id;
}
private void setId(Long id) {
this.id = id;
}
public String getTarget() {
return target;
}
public void setTarget(String target) {
this.target = target;
}
public long getTargetId() {
return targetId;
}
public void setTargetId(long targetId) {
this.targetId = targetId;
}
public int getGranteeType() {
return granteeType;
}
public void setGranteeType(int granteeType) {
this.granteeType = granteeType;
}
public String getGranteeCanonicalId() {
return granteeCanonicalId;
}
public void setGranteeCanonicalId(String granteeCanonicalId) {
this.granteeCanonicalId = granteeCanonicalId;
}
public int getPermission() {
return permission;
}
public void setPermission(int permission) {
this.permission = permission;
}
public int getGrantOrder() {
return grantOrder;
}
public void setGrantOrder(int grantOrder) {
this.grantOrder = grantOrder;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getLastModifiedTime() {
return lastModifiedTime;
}
public void setLastModifiedTime(Date lastModifiedTime) {
this.lastModifiedTime = lastModifiedTime;
}
/** Return an OrderedPair
* < permission, grantee >
* comprising
@ -153,9 +63,9 @@ public class SAcl implements Serializable {
* @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs
* @param target - Either "SBucket" or otherwise assumed to be for a single object item
*/
public static OrderedPair <Integer,Integer> getCannedAccessControls ( String aclRequestString, String target )
throws UnsupportedException
{
//public static OrderedPair <Integer,Integer> getCannedAccessControls ( String aclRequestString, String target );
/* {
if ( aclRequestString.equalsIgnoreCase( "public-read" ))
// All users granted READ access.
return new OrderedPair <Integer,Integer> (PERMISSION_READ,GRANTEE_ALLUSERS);
@ -184,7 +94,7 @@ public class SAcl implements Serializable {
}
else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" );
}
*/
/** Return a Triple
* < permission1, permission2, symbol >
* comprising
@ -200,8 +110,8 @@ public class SAcl implements Serializable {
* @param target - Either "SBucket" or otherwise assumed to be for a single object item
* @param ownerID - An ID for the owner, if used in place of symbols "A" or "*"
*/
public static Triple <Integer,Integer,String> getCannedAccessControls ( String aclRequestString, String target, String ownerID )
throws UnsupportedException
//public static Triple <Integer,Integer,String> getCannedAccessControls ( String aclRequestString, String target, String ownerID );
/* throws UnsupportedException
{
if ( aclRequestString.equalsIgnoreCase( "public-read" ))
// Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ access.
@ -235,5 +145,5 @@ public class SAcl implements Serializable {
}
else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" );
}
*/
}

View File

@ -0,0 +1,254 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.model;
import java.util.Date;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import com.cloud.bridge.service.exception.UnsupportedException;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.bridge.util.Triple;
/**
* A model of stored ACLs to remember the ACL permissions per canonicalUserID per grantee
* Hold the AWS S3 grantee and permission constants.
*
* This class implements two forms of getCannedAccessControls mappings, as static methods,
*
* (a) an OrderedPair which provides a maplet across
* < permission, grantee >
* when given an aclRequestString and a target (i.e. bucket or object),
*
* (b) a Triplet
* < permission1, permission2, symbol >
* when given an aclRequestString, a target (i.e. bucket or object) and the ID of the owner.
*/
@Entity
@Table(name="acl")
public class SAclVO implements SAcl {
private static final long serialVersionUID = 7900837117165018850L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="Target")
private String target;
@Column(name="TargetID")
private long targetId;
@Column(name="GranteeType")
private int granteeType;
@Column(name="GranteeCanonicalID")
private String granteeCanonicalId;
@Column(name="Permission")
private int permission;
@Column(name="GrantOrder")
private int grantOrder;
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
@Column(name="LastModifiedTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date lastModifiedTime;
public SAclVO() {
}
public Long getId() {
return id;
}
private void setId(Long id) {
this.id = id;
}
public String getTarget() {
return target;
}
public void setTarget(String target) {
this.target = target;
}
public long getTargetId() {
return targetId;
}
public void setTargetId(long targetId) {
this.targetId = targetId;
}
public int getGranteeType() {
return granteeType;
}
public void setGranteeType(int granteeType) {
this.granteeType = granteeType;
}
public String getGranteeCanonicalId() {
return granteeCanonicalId;
}
public void setGranteeCanonicalId(String granteeCanonicalId) {
this.granteeCanonicalId = granteeCanonicalId;
}
public int getPermission() {
return permission;
}
public void setPermission(int permission) {
this.permission = permission;
}
public int getGrantOrder() {
return grantOrder;
}
public void setGrantOrder(int grantOrder) {
this.grantOrder = grantOrder;
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public Date getLastModifiedTime() {
return lastModifiedTime;
}
public void setLastModifiedTime(Date lastModifiedTime) {
this.lastModifiedTime = lastModifiedTime;
}
/** Return an OrderedPair
* < permission, grantee >
* comprising
* a permission - which is one of SAcl.PERMISSION_PASS, SAcl.PERMISSION_NONE, SAcl.PERMISSION_READ,
* SAcl.PERMISSION_WRITE, SAcl.PERMISSION_READ_ACL, SAcl.PERMISSION_WRITE_ACL, SAcl.PERMISSION_FULL
* a grantee - which is one of GRANTEE_ALLUSERS, GRANTEE_AUTHENTICATED, GRANTEE_USER
*
* Access controls that are specified via the "x-amz-acl:" headers in REST requests for buckets.
* The ACL request string is treated as a request for a known cannedAccessPolicy
* @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs
* @param target - Either "SBucket" or otherwise assumed to be for a single object item
*/
public static OrderedPair <Integer,Integer> getCannedAccessControls ( String aclRequestString, String target )
throws UnsupportedException
{
if ( aclRequestString.equalsIgnoreCase( "public-read" ))
// All users granted READ access.
return new OrderedPair <Integer,Integer> (PERMISSION_READ,GRANTEE_ALLUSERS);
else if (aclRequestString.equalsIgnoreCase( "public-read-write" ))
// All users granted READ and WRITE access
return new OrderedPair <Integer,Integer> ((PERMISSION_READ | PERMISSION_WRITE),GRANTEE_ALLUSERS);
else if (aclRequestString.equalsIgnoreCase( "authenticated-read" ))
// Authenticated users have READ access
return new OrderedPair <Integer,Integer> (PERMISSION_READ,GRANTEE_AUTHENTICATED);
else if (aclRequestString.equalsIgnoreCase( "private" ))
// Only Owner gets FULL_CONTROL
return new OrderedPair <Integer,Integer> (PERMISSION_FULL,GRANTEE_USER);
else if (aclRequestString.equalsIgnoreCase( "bucket-owner-read" ))
{
// Object Owner gets FULL_CONTROL, Bucket Owner gets READ
if ( target.equalsIgnoreCase( "SBucket" ))
return new OrderedPair <Integer,Integer> (PERMISSION_READ, GRANTEE_USER);
else
return new OrderedPair <Integer,Integer> (PERMISSION_FULL, GRANTEE_USER);
}
else if (aclRequestString.equalsIgnoreCase( "bucket-owner-full-control" ))
{
// Object Owner gets FULL_CONTROL, Bucket Owner gets FULL_CONTROL
// This is equivalent to private when used with PUT Bucket
return new OrderedPair <Integer,Integer> (PERMISSION_FULL,GRANTEE_USER);
}
else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" );
}
/** Return a Triple
* < permission1, permission2, symbol >
* comprising
* two permissions - which is one of SAcl.PERMISSION_PASS, SAcl.PERMISSION_NONE, SAcl.PERMISSION_READ,
* SAcl.PERMISSION_WRITE, SAcl.PERMISSION_READ_ACL, SAcl.PERMISSION_WRITE_ACL, SAcl.PERMISSION_FULL
* permission1 applies to objects, permission2 applies to buckets.
* a symbol to indicate whether the principal is anonymous (i.e. string "A") or authenticated user (i.e.
* string "*") - otherwise null indicates a single ACL for all users.
*
* Access controls that are specified via the "x-amz-acl:" headers in REST requests for buckets.
* The ACL request string is treated as a request for a known cannedAccessPolicy
* @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs
* @param target - Either "SBucket" or otherwise assumed to be for a single object item
* @param ownerID - An ID for the owner, if used in place of symbols "A" or "*"
*/
public static Triple <Integer,Integer,String> getCannedAccessControls ( String aclRequestString, String target, String ownerID )
throws UnsupportedException
{
if ( aclRequestString.equalsIgnoreCase( "public-read" ))
// Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ access.
return new Triple <Integer, Integer, String> (PERMISSION_FULL, PERMISSION_READ,"A");
else if (aclRequestString.equalsIgnoreCase( "public-read-write" ))
// Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ and WRITE access
return new Triple <Integer, Integer, String> (PERMISSION_FULL, (PERMISSION_READ | PERMISSION_WRITE),"A");
else if (aclRequestString.equalsIgnoreCase( "authenticated-read" ))
// Owner gets FULL_CONTROL and ANY principal authenticated as a registered S3 user (the '*' symbol here) is granted READ access
return new Triple <Integer, Integer, String> (PERMISSION_FULL, PERMISSION_READ,"*");
else if (aclRequestString.equalsIgnoreCase( "private" ))
// This is termed the "private" or default ACL, "Owner gets FULL_CONTROL"
return new Triple <Integer, Integer, String> (PERMISSION_FULL, PERMISSION_FULL,null);
else if (aclRequestString.equalsIgnoreCase( "bucket-owner-read" ))
{
// Object Owner gets FULL_CONTROL, Bucket Owner gets READ
// This is equivalent to private when used with PUT Bucket
if ( target.equalsIgnoreCase( "SBucket" ))
return new Triple <Integer, Integer, String> (PERMISSION_FULL,PERMISSION_FULL ,null);
else
return new Triple <Integer, Integer, String> (PERMISSION_FULL,PERMISSION_READ,ownerID);
}
else if (aclRequestString.equalsIgnoreCase( "bucket-owner-full-control" ))
{
// Object Owner gets FULL_CONTROL, Bucket Owner gets FULL_CONTROL
// This is equivalent to private when used with PUT Bucket
if ( target.equalsIgnoreCase( "SBucket" ))
return new Triple <Integer, Integer, String> (PERMISSION_FULL, PERMISSION_FULL, null);
else
return new Triple <Integer, Integer, String> (PERMISSION_FULL,PERMISSION_FULL, ownerID);
}
else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" );
}
}
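// Editor's note: a minimal usage sketch, not part of this commit. The enclosing class
// name is assumed to be SAcl (the constants above are documented as SAcl.PERMISSION_*);
// the two getCannedAccessControls overloads, OrderedPair.getFirst()/getSecond() and
// UnsupportedException come from the code shown in this change.
import com.cloud.bridge.util.OrderedPair;

public class CannedAclExample {
    public static void main(String[] args) throws Exception {
        // "public-read" on a bucket: every user is granted READ.
        OrderedPair<Integer, Integer> pair =
                SAcl.getCannedAccessControls("public-read", "SBucket");
        System.out.println("permission=" + pair.getFirst() + ", grantee=" + pair.getSecond());

        // The three-argument overload additionally distinguishes bucket and object grants:
        // here the owner keeps FULL_CONTROL and any authenticated user ("*") gets READ.
        System.out.println(SAcl.getCannedAccessControls("authenticated-read", "SBucket", "ownerCanonicalId"));

        // Unknown canned ACL strings are rejected with an UnsupportedException.
        SAcl.getCannedAccessControls("no-such-acl", "SBucket");
    }
}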

View File

@@ -1,56 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SBucket" table="sbucket" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="name">
<column name="Name" />
</property>
<property name="ownerCanonicalId">
<column name="OwnerCanonicalId" />
</property>
<property name="createTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="CreateTime" />
</property>
<property name="versioningStatus">
<column name="VersioningStatus" />
</property>
<many-to-one name="shost" column="SHostID"
class="com.cloud.bridge.model.SHost"
cascade="none"
/>
<set name="objectsInBucket" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="SBucketID" />
<one-to-many class="com.cloud.bridge.model.SObject" />
</set>
</class>
</hibernate-mapping>

View File

@@ -31,14 +31,13 @@ import java.util.Set;
* VersioningStatus
* For ORM see "com/cloud/bridge/model/SHost.hbm.xml"
*/
public class SBucket implements Serializable {
private static final long serialVersionUID = 7430267766019671273L;
public static final int VERSIONING_NULL = 0;
public interface SBucket {
public static final int VERSIONING_NULL = 0;
public static final int VERSIONING_ENABLED = 1;
public static final int VERSIONING_SUSPENDED = 2;
private Long id;
/* private Long id;
private String name;
private String ownerCanonicalId;
@@ -124,5 +123,5 @@ public class SBucket implements Serializable {
@Override
public int hashCode() {
return getName().hashCode();
}
}*/
}

View File

@@ -0,0 +1,169 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.model;
import java.io.Serializable;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
/**
* Holds the relation
* Id,
* Name,
* OwnerCanonicalId,
* SHost,
* CreateTime,
* VersioningStatus
* For ORM mapping see the JPA annotations below (formerly "com/cloud/bridge/model/SBucket.hbm.xml")
*/
@Entity
@Table(name="sbucket")
public class SBucketVO implements SBucket {
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="Name")
private String name;
@Column(name="OwnerCanonicalID")
private String ownerCanonicalId;
@Column(name="SHostID")
private long shostID;
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
@Column(name="VersioningStatus")
private int versioningStatus;
@Transient
private SHostVO shost;
@Transient
private Set<SObjectVO> objectsInBucket = new HashSet<SObjectVO>();
public SBucketVO() {
versioningStatus = VERSIONING_NULL;
this.createTime = new Date();
}
public SBucketVO(String bucketName, Date currentGMTTime,
String canonicalUserId, SHostVO first) {
this.versioningStatus = VERSIONING_NULL;
this.name = bucketName;
this.createTime = new Date();
this.ownerCanonicalId = canonicalUserId;
this.shost = first;
this.shostID = shost.getId();
}
public Long getId() {
return id;
}
private void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getOwnerCanonicalId() {
return ownerCanonicalId;
}
public void setOwnerCanonicalId(String ownerCanonicalId) {
this.ownerCanonicalId = ownerCanonicalId;
}
public long getShostID() {
return shostID;
}
public void setShostID(long shostID) {
this.shostID = shostID;
}
public SHostVO getShost() {
return shost;
}
public void setShost(SHostVO shost) {
this.shost = shost;
}
public Date getCreateTime() {
return createTime;
}
public int getVersioningStatus() {
return versioningStatus;
}
public void setVersioningStatus( int versioningStatus ) {
this.versioningStatus = versioningStatus;
}
public Set<SObjectVO> getObjectsInBucket() {
return objectsInBucket;
}
public void setObjectsInBucket(Set<SObjectVO> objectsInBucket) {
this.objectsInBucket = objectsInBucket;
}
@Override
public boolean equals(Object other) {
if(this == other)
return true;
if(!(other instanceof SBucketVO))
return false;
return getName().equals(((SBucketVO)other).getName());
}
@Override
public int hashCode() {
return getName().hashCode();
}
}
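// Editor's note: a minimal sketch, not part of this commit, showing how the VO above is
// populated; it assumes an already-persisted SHostVO and that the caller sits in (or
// imports from) the com.cloud.bridge.model package. How the VO is persisted afterwards
// depends on the GenericDao wiring introduced elsewhere in this change.
import java.util.Date;

public class SBucketVOExample {
    public static SBucketVO newBucket(SHostVO storageHost, String ownerCanonicalId) {
        // The constructor records the owner and storage host id; CreateTime is set to "now"
        // and versioning starts out as VERSIONING_NULL.
        SBucketVO bucket = new SBucketVO("my-first-bucket", new Date(), ownerCanonicalId, storageHost);
        // Versioning can be switched on explicitly via the SBucket constants.
        bucket.setVersioningStatus(SBucket.VERSIONING_ENABLED);
        return bucket;
    }
}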

View File

@@ -1,60 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SHost" table="shost" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="host">
<column name="Host" />
</property>
<property name="hostType">
<column name="HostType" />
</property>
<property name="exportRoot">
<column name="ExportRoot" />
</property>
<property name="userOnHost">
<column name="UserOnHost"/>
</property>
<property name="userPassword">
<column name="UserPassword"/>
</property>
<many-to-one name="mhost" column="MHostID"
class="com.cloud.bridge.model.MHost"
cascade="none"
/>
<set name="buckets" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="SHostID" />
<one-to-many class="com.cloud.bridge.model.SBucket" />
</set>
<set name="mounts" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="SHostID" />
<one-to-many class="com.cloud.bridge.model.MHostMount" />
</set>
</class>
</hibernate-mapping>

View File

@@ -20,17 +20,19 @@ import java.io.Serializable;
import java.util.HashSet;
import java.util.Set;
public class SHost implements Serializable {
private static final long serialVersionUID = 213346565810468018L;
public interface SHost {
public static final int STORAGE_HOST_TYPE_LOCAL = 0;
public static final int STORAGE_HOST_TYPE_NFS = 1;
private Long id;
public static enum StorageHostType {
STORAGE_HOST_TYPE_LOCAL, //0
STORAGE_HOST_TYPE_NFS //1
}
/* private Long id;
private String host;
private int hostType;
private MHost mhost;
private MHostVO mhost;
private String exportRoot;
private String userOnHost;
private String userPassword;
@@ -89,11 +91,11 @@ public class SHost implements Serializable {
this.userPassword = userPassword;
}
public MHost getMhost() {
public MHostVO getMhost() {
return mhost;
}
public void setMhost(MHost mhost) {
public void setMhost(MHostVO mhost) {
this.mhost = mhost;
}
@@ -111,5 +113,5 @@ public class SHost implements Serializable {
public void setMounts(Set<MHostMount> mounts) {
this.mounts = mounts;
}
}*/
}

View File

@@ -0,0 +1,152 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.model;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Transient;
@Entity
@Table(name="shost")
public class SHostVO implements SHost{
private static final long serialVersionUID = 213346565810468018L;
public static final int STORAGE_HOST_TYPE_LOCAL = 0;
public static final int STORAGE_HOST_TYPE_NFS = 1;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="Host")
private String host;
@Column(name="HostType")
private int hostType;
@Column(name="MHostID")
private long mhostid;
@Column(name="ExportRoot")
private String exportRoot;
@Column(name="UserOnHost")
private String userOnHost;
@Column(name="UserPassword")
private String userPassword;
@Transient
private MHostVO mhost;
@Transient
private Set<SBucket> buckets = new HashSet<SBucket>();
@Transient
private Set<MHostMountVO> mounts = new HashSet<MHostMountVO>();
public SHostVO() {
}
public Long getId() {
return id;
}
private void setId(Long id) {
this.id = id;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getHostType() {
return hostType;
}
public void setHostType(int hostType) {
this.hostType = hostType;
}
public long getMhostid() {
return mhostid;
}
public void setMhostid(long mhostid) {
this.mhostid = mhostid;
}
public String getExportRoot() {
return exportRoot;
}
public void setExportRoot(String exportRoot) {
this.exportRoot = exportRoot;
}
public String getUserOnHost() {
return userOnHost;
}
public void setUserOnHost(String userOnHost) {
this.userOnHost = userOnHost;
}
public String getUserPassword() {
return userPassword;
}
public void setUserPassword(String userPassword) {
this.userPassword = userPassword;
}
public MHostVO getMhost() {
return mhost;
}
public void setMhost(MHostVO mhost) {
this.mhost = mhost;
}
public Set<SBucket> getBuckets() {
return buckets;
}
public void setBuckets(Set<SBucket> buckets) {
this.buckets = buckets;
}
public Set<MHostMountVO> getMounts() {
return mounts;
}
public void setMounts(Set<MHostMountVO> mounts) {
this.mounts = mounts;
}
}

View File

@@ -1,41 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SMeta" table="meta" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="target">
<column name="Target" />
</property>
<property name="targetId">
<column name="TargetID" />
</property>
<property name="name">
<column name="Name" />
</property>
<property name="value">
<column name="Value" />
</property>
</class>
</hibernate-mapping>

View File

@@ -18,18 +18,36 @@ package com.cloud.bridge.model;
import java.io.Serializable;
public class SMeta implements Serializable {
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="meta")
public class SMetaVO implements Serializable {
private static final long serialVersionUID = 7459503272337054283L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="Target")
private String target;
@Column(name="TargetID")
private long targetId;
@Column(name="Name")
private String name;
private String value;
public SMeta() {
}
@Column(name="Value")
private String value;
public SMetaVO() {}
public Long getId() {
return id;
@@ -76,11 +94,11 @@ public class SMeta implements Serializable {
if(this == other)
return true;
if(!(other instanceof SMeta))
if(!(other instanceof SMetaVO))
return false;
return getTarget().equals(((SMeta)other).getTarget()) && getTargetId() == ((SMeta)other).getTargetId()
&& getName().equals(((SMeta)other).getName());
return getTarget().equals(((SMetaVO)other).getTarget()) && getTargetId() == ((SMetaVO)other).getTargetId()
&& getName().equals(((SMetaVO)other).getName());
}
@Override

View File

@@ -1,59 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SObject" table="sobject" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="nameKey">
<column name="NameKey" />
</property>
<property name="ownerCanonicalId">
<column name="OwnerCanonicalId" />
</property>
<property name="nextSequence">
<column name="NextSequence" />
</property>
<property name="deletionMark">
<column name="DeletionMark" />
</property>
<property name="createTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="CreateTime" />
</property>
<many-to-one name="bucket" column="SBucketID"
class="com.cloud.bridge.model.SBucket"
cascade="none"
/>
<set name="items" inverse="true" lazy="true" cascade="all-delete-orphan">
<key column="SObjectID" />
<one-to-many class="com.cloud.bridge.model.SObjectItem" />
</set>
</class>
</hibernate-mapping>

View File

@@ -1,62 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.SObjectItem" table="sobject_item" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="version">
<column name="Version" />
</property>
<property name="md5">
<column name="MD5" />
</property>
<property name="storedPath">
<column name="StoredPath" />
</property>
<property name="storedSize">
<column name="StoredSize" />
</property>
<property name="createTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="CreateTime" />
</property>
<property name="lastModifiedTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="LastModifiedTime" />
</property>
<property name="lastAccessTime" type="com.cloud.bridge.persist.GMTDateTimeUserType">
<column name="LastAccessTime" />
</property>
<many-to-one name="theObject" column="SObjectID"
class="com.cloud.bridge.model.SObject"
cascade="none"
/>
</class>
</hibernate-mapping>

View File

@@ -19,22 +19,56 @@ package com.cloud.bridge.model;
import java.io.Serializable;
import java.util.Date;
public class SObjectItem implements Serializable {
private static final long serialVersionUID = -7351173256185687851L;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
@Entity
@Table(name="sobject_item")
public class SObjectItemVO {
private static final long serialVersionUID = -7351173256185687851L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
private SObject theObject;
@Column(name="SObjectID")
private long sObjectID;
@Column(name="Version")
private String version;
@Column(name="MD5")
private String md5;
@Column(name="StoredPath")
private String storedPath;
@Column(name="StoredSize")
private long storedSize;
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
@Column(name="LastModifiedTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date lastModifiedTime;
@Column(name="LastAccessTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date lastAccessTime;
public SObjectItem() {
@Transient
private SObjectVO theObject;
public SObjectItemVO() {
}
public Long getId() {
@@ -45,14 +79,22 @@ public class SObjectItem implements Serializable {
this.id = id;
}
public SObject getTheObject() {
public SObjectVO getTheObject() {
return theObject;
}
public void setTheObject(SObject theObject) {
public void setTheObject(SObjectVO theObject) {
this.theObject = theObject;
}
public long getsObjectID() {
return sObjectID;
}
public void setsObjectID(long sObjectID) {
this.sObjectID = sObjectID;
}
public String getVersion() {
return version;
}
@@ -114,22 +156,22 @@ public class SObjectItem implements Serializable {
if(this == other)
return true;
if(!(other instanceof SObjectItem))
if(!(other instanceof SObjectItemVO))
return false;
if(version != null) {
if(!version.equals(((SObjectItem)other).getVersion()))
if(!version.equals(((SObjectItemVO)other).getVersion()))
return false;
} else {
if(((SObjectItem)other).getVersion() != null)
if(((SObjectItemVO)other).getVersion() != null)
return false;
}
if(theObject.getId() != null) {
if(!theObject.getId().equals(((SObjectItem)other).getTheObject()))
if(!theObject.getId().equals(((SObjectItemVO)other).getTheObject()))
return false;
} else {
if(((SObjectItem)other).getTheObject() != null)
if(((SObjectItemVO)other).getTheObject() != null)
return false;
}
return true;

View File

@@ -16,30 +16,58 @@
// under the License.
package com.cloud.bridge.model;
import java.io.Serializable;
import java.util.Date;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
public class SObject implements Serializable {
private static final long serialVersionUID = 8566744941395660486L;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.persistence.Transient;
@Entity
@Table(name="sobject")
public class SObjectVO {
//private static final long serialVersionUID = 8566744941395660486L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="SBucketID")
private long bucketID;
@Column(name="NameKey")
private String nameKey;
@Column(name="OwnerCanonicalID")
private String ownerCanonicalId;
@Column(name="NextSequence")
private int nextSequence;
private String deletionMark; // This must also be a unique ID to give to the REST client
@Column(name="DeletionMark")
private String deletionMark; // This must also be a unique ID to give to the REST client
@Column(name="CreateTime")
@Temporal(value=TemporalType.TIMESTAMP)
private Date createTime;
@Transient
private SBucket bucket;
private Set<SObjectItem> items = new HashSet<SObjectItem>();
public SObject() {
@Transient
private Set<SObjectItemVO> items = new HashSet<SObjectItemVO>();
public SObjectVO() {
deletionMark = null;
}
@@ -51,6 +79,14 @@ public class SObject implements Serializable {
this.id = id;
}
public long getBucketID() {
return bucketID;
}
public void setBucketID(long bucketID) {
this.bucketID = bucketID;
}
public String getNameKey() {
return nameKey;
}
@@ -99,20 +135,20 @@ public class SObject implements Serializable {
this.bucket = bucket;
}
public Set<SObjectItem> getItems() {
public Set<SObjectItemVO> getItems() {
return items;
}
public void setItems(Set<SObjectItem> items) {
public void setItems(Set<SObjectItemVO> items) {
this.items = items;
}
public void deleteItem( long id ) {
Iterator<SObjectItem> it = getItems().iterator();
Iterator<SObjectItemVO> it = getItems().iterator();
while( it.hasNext())
{
SObjectItem oneItem = it.next();
SObjectItemVO oneItem = it.next();
if (id == oneItem.getId()) {
boolean bRemoved = items.remove( oneItem );
System.out.println( "deleteItem from sobject: " + bRemoved );
@@ -121,15 +157,15 @@ public class SObject implements Serializable {
}
}
public SObjectItem getLatestVersion( boolean versioningOff ) {
Iterator<SObjectItem> it = getItems().iterator();
public SObjectItemVO getLatestVersion( boolean versioningOff ) {
Iterator<SObjectItemVO> it = getItems().iterator();
int maxVersion = 0;
int curVersion = 0;
SObjectItem latestItem = null;
SObjectItemVO latestItem = null;
while( it.hasNext())
{
SObjectItem item = it.next();
SObjectItemVO item = it.next();
// If versioning is off then return the item with the null version string (if exists)
// For example, the bucket could have allowed versioning and then it was suspended
@@ -160,12 +196,12 @@ public class SObject implements Serializable {
* @param wantVersion
* @return
*/
public SObjectItem getVersion( String wantVersion )
public SObjectItemVO getVersion( String wantVersion )
{
Iterator<SObjectItem> it = getItems().iterator();
Iterator<SObjectItemVO> it = getItems().iterator();
while( it.hasNext())
{
SObjectItem item = it.next();
SObjectItemVO item = it.next();
String curVersion = item.getVersion();
if (null != curVersion && wantVersion.equalsIgnoreCase( curVersion )) return item;
}
@@ -177,17 +213,17 @@ public class SObject implements Serializable {
if(this == other)
return true;
if(!(other instanceof SObject))
if(!(other instanceof SObjectVO))
return false;
if(!getNameKey().equals(((SObject)other).getNameKey()))
if(!getNameKey().equals(((SObjectVO)other).getNameKey()))
return false;
if(getBucket() != null) {
if(!getBucket().equals(((SObject)other).getBucket()))
if(!getBucket().equals(((SObjectVO)other).getBucket()))
return false;
} else {
if(((SObject)other).getBucket() != null)
if(((SObjectVO)other).getBucket() != null)
return false;
}

View File

@@ -1,38 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.bridge.model.UserCredentials" table="usercredentials" lazy="true">
<id name="id" type="long" column="ID" >
<generator class="native" />
</id>
<property name="accessKey">
<column name="AccessKey" />
</property>
<property name="secretKey">
<column name="SecretKey" />
</property>
<property name="certUniqueId">
<column name="CertUniqueId" />
</property>
</class>
</hibernate-mapping>

View File

@@ -18,17 +18,39 @@ package com.cloud.bridge.model;
import java.io.Serializable;
public class UserCredentials implements Serializable {
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.Table;
@Entity
@Table(name="usercredentials")
public class UserCredentialsVO{
private static final long serialVersionUID = 7459503272337054299L;
@Id
@GeneratedValue(strategy=GenerationType.IDENTITY)
@Column(name="ID")
private Long id;
@Column(name="AccessKey")
private String accessKey;
@Column(name="SecretKey")
private String secretKey;
@Column(name="CertUniqueId")
private String certUniqueId;
public UserCredentials() {
}
public UserCredentialsVO() { }
public UserCredentialsVO(String accessKey, String secretKey) {
this.accessKey = accessKey;
this.secretKey = secretKey;
}
public Long getId() {
return id;
}
@@ -65,14 +87,14 @@ public class UserCredentials implements Serializable {
public boolean equals(Object other) {
if (this == other) return true;
if (!(other instanceof UserCredentials)) return false;
if (!(other instanceof UserCredentialsVO)) return false;
// The cert id can be null. The cert is unused in the REST API.
if ( getAccessKey().equals(((UserCredentials)other).getAccessKey()) &&
getSecretKey().equals(((UserCredentials)other).getSecretKey()))
if ( getAccessKey().equals(((UserCredentialsVO)other).getAccessKey()) &&
getSecretKey().equals(((UserCredentialsVO)other).getSecretKey()))
{
String thisCertId = getCertUniqueId();
String otherCertId = ((UserCredentials)other).getCertUniqueId();
String otherCertId = ((UserCredentialsVO)other).getCertUniqueId();
if (null == thisCertId && null == otherCertId) return true;

View File

@@ -1,118 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist;
import java.io.Serializable;
import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;
import com.cloud.bridge.util.QueryHelper;
/**
* Provides methods for getting, saving, deleting or updating state per session or, in a given session, returning a List in
* response to queryEntities for a particular instantiation of the generic EntityDao class, as defined here.
* Each instantiation of EntityDao passes in the class it is instantiated for. For example, a new instance of SBucketDao
* passes in com.cloud.bridge.model.SBucket as its clazz.
* The instantiating classes, each providing an entity definition, are:
* MHostDao,
* MHostMountDao,
* SAclDao,
* SBucketDao,
* SHostDao,
* SMetaDao,
* SObjectDao,
* SObjectItemDao,
* CloudStackSvcOfferingDao
*/
public class EntityDao<T> {
private Class<?> clazz;
private boolean isCloudStackSession = false;
// Constructor to implement CloudStackSvcOffering: see class CloudStackSvcOfferingDao
public EntityDao(Class<?> clazz){
this(clazz, false);
}
public EntityDao(Class<?> clazz, boolean isCloudStackSession) {
this.clazz = clazz;
this.isCloudStackSession = isCloudStackSession;
// Note : beginTransaction can be called multiple times
// "If a new underlying transaction is required, begin the transaction. Otherwise continue the new work in the
// context of the existing underlying transaction." from the Hibernate spec
PersistContext.beginTransaction(isCloudStackSession);
}
@SuppressWarnings("unchecked")
public T get(Serializable id) {
Session session = PersistContext.getSession(isCloudStackSession);
return (T)session.get(clazz, id);
}
public T save(T entity) {
Session session = PersistContext.getSession(isCloudStackSession);
session.saveOrUpdate(entity);
return entity;
}
public T update(T entity) {
Session session = PersistContext.getSession(isCloudStackSession);
session.saveOrUpdate(entity);
return entity;
}
public void delete(T entity) {
Session session = PersistContext.getSession(isCloudStackSession);
session.delete(entity);
}
public T queryEntity(String hql, Object[] params) {
Session session = PersistContext.getSession(isCloudStackSession);
Query query = session.createQuery(hql);
query.setMaxResults(1);
QueryHelper.bindParameters(query, params);
return (T)query.uniqueResult();
}
public List<T> queryEntities(String hql, Object[] params) {
Session session = PersistContext.getSession(isCloudStackSession);
Query query = session.createQuery(hql);
QueryHelper.bindParameters(query, params);
return (List<T>)query.list();
}
public List<T> queryEntities(String hql, int offset, int limit, Object[] params) {
Session session = PersistContext.getSession(isCloudStackSession);
Query query = session.createQuery(hql);
QueryHelper.bindParameters(query, params);
query.setFirstResult(offset);
query.setMaxResults(limit);
return (List<T>)query.list();
}
public int executeUpdate(String hql, Object[] params) {
Session session = PersistContext.getSession(isCloudStackSession);
Query query = session.createQuery(hql);
QueryHelper.bindParameters(query, params);
return query.executeUpdate();
}
}
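// Editor's note: a hedged sketch of how the (now removed) generic EntityDao was typically
// specialised, inferred from the javadoc above and the DAO list it names; the concrete
// SBucketDao body is not shown in this hunk, so the HQL and method name are illustrative.
import com.cloud.bridge.model.SBucket;

public class SBucketDaoSketch extends EntityDao<SBucket> {
    public SBucketDaoSketch() {
        // Passing the entity class lets the inherited get/save/delete/query helpers operate
        // on that entity via the Hibernate session bound to the current thread.
        super(SBucket.class);
    }

    public SBucket getByName(String bucketName) {
        // queryEntity(hql, params) returns at most one row, see the generic method above.
        return queryEntity("from SBucket where name=?", new Object[] { bucketName });
    }
}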

View File

@@ -1,102 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist;
import java.io.Serializable;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Date;
import org.hibernate.HibernateException;
import org.hibernate.usertype.UserType;
import com.cloud.bridge.util.DateHelper;
/**
* GMTDateTimeUserType implements a Hibernate user type; it handles GMT date/time conversion
* between the Java Date/Calendar and MySQL DATE types.
*/
public class GMTDateTimeUserType implements UserType {
private static final int[] SQL_TYPES = { Types.VARBINARY };
public Class<?> returnedClass() { return Date.class; }
public boolean equals(Object x, Object y) {
if (x == y)
return true;
if (x == null || y == null)
return false;
return x.equals(y);
}
public int hashCode(Object x) {
if(x != null)
return x.hashCode();
return 0;
}
public Object deepCopy(Object value) {
if(value != null)
return ((Date)value).clone();
return null;
}
public boolean isMutable() {
return true;
}
public Object nullSafeGet(ResultSet resultSet, String[] names, Object owner)
throws HibernateException, SQLException {
String dateString = resultSet.getString(names[0]);
if(dateString != null)
return DateHelper.parseDateString(DateHelper.GMT_TIMEZONE, dateString);
return null;
}
public void nullSafeSet(PreparedStatement statement, Object value, int index)
throws HibernateException, SQLException {
if (value == null) {
statement.setNull(index, Types.TIMESTAMP);
} else {
Date dt = (Date)value;
statement.setString(index, DateHelper.getDateDisplayString(DateHelper.GMT_TIMEZONE, dt));
}
}
public Object assemble(Serializable cached, Object owner) throws HibernateException {
return DateHelper.parseDateString(DateHelper.GMT_TIMEZONE, (String)cached);
}
public Serializable disassemble(Object value) throws HibernateException {
return DateHelper.getDateDisplayString(DateHelper.GMT_TIMEZONE, (Date)value);
}
public Object replace(Object original, Object target, Object owner) throws HibernateException {
return ((Date)original).clone();
}
public int[] sqlTypes() {
return SQL_TYPES;
}
}
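// Editor's note: a small sketch, not part of this commit, of the GMT round-trip this user
// type performs; the DateHelper calls mirror the ones used in nullSafeSet()/nullSafeGet().
import java.util.Date;
import com.cloud.bridge.util.DateHelper;

public class GmtRoundTripExample {
    public static void main(String[] args) {
        Date now = new Date();
        // Serialize to the GMT string form written to the column (as nullSafeSet() does).
        String stored = DateHelper.getDateDisplayString(DateHelper.GMT_TIMEZONE, now);
        // Parse it back into a Date (as nullSafeGet() does when reading the column).
        Date parsed = DateHelper.parseDateString(DateHelper.GMT_TIMEZONE, stored);
        System.out.println(stored + " -> " + parsed);
    }
}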

View File

@@ -1,359 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.HashMap;
import java.util.Map;
import org.apache.log4j.Logger;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.Transaction;
import com.cloud.bridge.util.CloudSessionFactory;
import com.cloud.bridge.util.CloudStackSessionFactory;
import com.cloud.bridge.util.OrderedPair;
/**
*
* We use a per-thread Hibernate session and transaction pattern. Transactions are
* started implicitly by EntityDao instances and committed implicitly at the end of the
* request-processing cycle. All requests are guarded by a dynamic proxy.
*
* We try to keep transaction management as implicit as we can, so that
* most of the service-layer implementation contains business logic only; all business logic is
* built on top of the domain object model, and all persistence handling lives within the
* persist-layer Dao classes.
*
* The PersistContext class also provides a per-thread registry service and a global named-lock service.
*/
public class PersistContext {
protected final static Logger logger = Logger.getLogger(PersistContext.class);
private static final CloudSessionFactory sessionFactory;
private static final ThreadLocal<Session> threadSession = new ThreadLocal<Session>();
private static final ThreadLocal<Transaction> threadTransaction = new ThreadLocal<Transaction>();
private static final ThreadLocal<Map<String, Object>> threadStore = new ThreadLocal<Map<String, Object>>();
private static final CloudStackSessionFactory cloudStackSessionFactory;
private static final ThreadLocal<Session> threadCloudStackSession = new ThreadLocal<Session>();
private static final ThreadLocal<Transaction> threadCloudStackTransaction = new ThreadLocal<Transaction>();
static {
try {
sessionFactory = CloudSessionFactory.getInstance();
cloudStackSessionFactory = CloudStackSessionFactory.getInstance();
} catch(HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
throw new PersistException(e);
}
}
public static Session getSession(boolean cloudStackSession) {
Session s = null;
try {
if(cloudStackSession){
s = threadCloudStackSession.get();
if(s == null) {
s = cloudStackSessionFactory.openSession();
threadCloudStackSession.set(s);
}
}else{
s = threadSession.get();
if(s == null) {
s = sessionFactory.openSession();
threadSession.set(s);
}
}
} catch(HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
throw new PersistException(e);
}
return s;
}
public static Session getSession() {
return getSession(false);
}
public static void closeSession() {
closeSession(false);
}
public static void closeSession(boolean cloudStackSession) {
try {
if(cloudStackSession){
Session s = (Session) threadCloudStackSession.get();
threadCloudStackSession.set(null);
if (s != null && s.isOpen())
s.close();
}else{
Session s = (Session) threadSession.get();
threadSession.set(null);
if (s != null && s.isOpen())
s.close();
}
}catch(HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
throw new PersistException(e);
}
}
public static void beginTransaction(boolean cloudStackTxn) {
Transaction tx = null;
try {
if(cloudStackTxn){
tx = threadCloudStackTransaction.get();
}else{
tx = threadTransaction.get();
}
if (tx == null) {
tx = getSession(cloudStackTxn).beginTransaction();
if(cloudStackTxn){
threadCloudStackTransaction.set(tx);
}else{
threadTransaction.set(tx);
}
}
} catch(HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
throw new PersistException(e);
}
}
public static void beginTransaction() {
beginTransaction(false);
}
public static void commitTransaction(boolean cloudStackTxn) {
Transaction tx = null;
if(cloudStackTxn){
tx = threadCloudStackTransaction.get();
}else{
tx = threadTransaction.get();
}
try {
if ( tx != null && !tx.wasCommitted() && !tx.wasRolledBack() ){
tx.commit();
}
if(cloudStackTxn){
threadCloudStackTransaction.set(null);
}else{
threadTransaction.set(null);
}
} catch (HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
rollbackTransaction(cloudStackTxn);
throw new PersistException(e);
}
}
public static void commitTransaction() {
commitTransaction(false);
}
public static void rollbackTransaction(boolean cloudStackTxn) {
Transaction tx = null;
if(cloudStackTxn){
tx = (Transaction)threadCloudStackTransaction.get();
threadCloudStackTransaction.set(null);
}else{
tx = (Transaction)threadTransaction.get();
threadTransaction.set(null);
}
try {
if ( tx != null && !tx.wasCommitted() && !tx.wasRolledBack() ) {
tx.rollback();
}
} catch (HibernateException e) {
logger.error("Exception " + e.getMessage(), e);
throw new PersistException(e);
} finally {
closeSession(cloudStackTxn);
}
}
public static void rollbackTransaction() {
rollbackTransaction(false);
}
public static void flush() {
commitTransaction();
beginTransaction();
}
/**
* acquireNamedLock/releaseNamedLock must be called in pairs and within the same thread;
* they cannot be called recursively either.
*
* @param name
* @param timeoutSeconds
* @return
*/
public static boolean acquireNamedLock(String name, int timeoutSeconds) {
Connection jdbcConnection = getJDBCConnection(name, true);
if(jdbcConnection == null) {
logger.warn("Unable to acquire named lock connection for named lock: " + name);
return false;
}
PreparedStatement pstmt = null;
try {
pstmt = jdbcConnection.prepareStatement("SELECT COALESCE(GET_LOCK(?, ?),0)");
pstmt.setString(1, name);
pstmt.setInt(2, timeoutSeconds);
ResultSet rs = pstmt.executeQuery();
if (rs != null && rs.first()) {
if(rs.getInt(1) > 0) {
return true;
} else {
logger.error("GET_LOCK() timed out on lock : " + name);
}
}
} catch (SQLException e) {
logger.error("GET_LOCK() throws exception ", e);
} catch (Throwable e) {
logger.error("GET_LOCK() throws exception ", e);
} finally {
if (pstmt != null) {
try {
pstmt.close();
} catch (SQLException e) {
logger.error("Unexpected exception " + e.getMessage(), e);
}
}
}
releaseJDBCConnection(name);
return false;
}
public static boolean releaseNamedLock(String name) {
Connection jdbcConnection = getJDBCConnection(name, false);
if(jdbcConnection == null) {
logger.error("Unable to acquire DB connection for global lock system");
return false;
}
PreparedStatement pstmt = null;
try {
pstmt = jdbcConnection.prepareStatement("SELECT COALESCE(RELEASE_LOCK(?), 0)");
pstmt.setString(1, name);
ResultSet rs = pstmt.executeQuery();
if(rs != null && rs.first())
return rs.getInt(1) > 0;
logger.error("RELEASE_LOCK() returns unexpected result : " + rs.getInt(1));
} catch (SQLException e) {
logger.error("RELEASE_LOCK() throws exception ", e);
} catch (Throwable e) {
logger.error("RELEASE_LOCK() throws exception ", e);
} finally {
releaseJDBCConnection(name);
}
return false;
}
@SuppressWarnings("deprecation")
private static Connection getJDBCConnection(String name, boolean allocNew) {
String registryKey = "JDBC-Connection." + name;
OrderedPair<Session, Connection> info = (OrderedPair<Session, Connection>)getThreadStoreObject(registryKey);
if(info == null && allocNew) {
Session session = sessionFactory.openSession();
Connection connection = session.connection();
if(connection == null) {
session.close();
return null;
}
try {
connection.setAutoCommit(true);
} catch(SQLException e) {
logger.warn("Unexpected exception " + e.getMessage(), e);
try {
connection.close();
session.close();
} catch(Throwable ex) {
logger.warn("Unexpected exception " + e.getMessage(), e);
}
return null;
}
registerThreadStoreObject(registryKey, new OrderedPair<Session, Connection>(session, connection));
return connection;
}
if(info != null)
return info.getSecond();
return null;
}
private static void releaseJDBCConnection(String name) {
String registryKey = "JDBC-Connection." + name;
OrderedPair<Session, Connection> info = (OrderedPair<Session, Connection>)unregisterThreadStoreObject(registryKey);
if(info != null) {
try {
info.getSecond().close();
info.getFirst().close();
} catch(Throwable e) {
logger.warn("Unexpected exception " + e.getMessage(), e);
}
}
}
public static void registerThreadStoreObject(String name, Object object) {
Map<String, Object> store = getThreadStore();
store.put(name, object);
}
public static Object getThreadStoreObject(String name) {
Map<String, Object> store = getThreadStore();
return store.get(name);
}
public static Object unregisterThreadStoreObject(String name) {
Map<String, Object> store = getThreadStore();
if(store.containsKey(name)) {
Object value = store.get(name);
store.remove(name);
return value;
}
return null;
}
private static Map<String, Object> getThreadStore() {
Map<String, Object> store = threadStore.get();
if(store == null) {
store = new HashMap<String, Object>();
threadStore.set(store);
}
return store;
}
}
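// Editor's note: an illustrative sketch, not part of this commit, of the acquire/release
// pairing required by the named-lock service above; the lock name and timeout are arbitrary.
public class NamedLockExample {
    public void doExclusiveWork() {
        // The lock must be acquired and released by the same thread, and never recursively.
        if (!PersistContext.acquireNamedLock("bucket.my-first-bucket", 10)) {
            throw new IllegalStateException("could not obtain named lock within 10 seconds");
        }
        try {
            // ... critical section serialized via MySQL GET_LOCK() ...
        } finally {
            PersistContext.releaseNamedLock("bucket.my-first-bucket");
        }
    }
}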

View File

@@ -1,159 +1,12 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
import com.cloud.bridge.model.BucketPolicyVO;
import com.cloud.utils.db.GenericDao;
import org.apache.log4j.Logger;
public interface BucketPolicyDao extends GenericDao<BucketPolicyVO, Long> {
import com.cloud.bridge.util.ConfigurationHelper;
void deletePolicy(String bucketName);
public class BucketPolicyDao {
public static final Logger logger = Logger.getLogger(BucketPolicyDao.class);
BucketPolicyVO getByName(String bucketName);
private Connection conn = null;
private String dbName = null;
private String dbUser = null;
private String dbPassword = null;
private String dbHost = null;
private String dbPort = null;
public BucketPolicyDao()
{
File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties");
Properties EC2Prop = null;
if (null != propertiesFile) {
EC2Prop = new Properties();
try {
EC2Prop.load( new FileInputStream( propertiesFile ));
} catch (FileNotFoundException e) {
logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e);
} catch (IOException e) {
logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e);
}
dbHost = EC2Prop.getProperty( "db.cloud.host" );
dbName = EC2Prop.getProperty( "db.awsapi.name" );
dbUser = EC2Prop.getProperty( "db.cloud.username" );
dbPassword = EC2Prop.getProperty( "db.cloud.password" );
dbPort = EC2Prop.getProperty( "db.cloud.port" );
}
}
public void addPolicy( String bucketName, String owner, String policy )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
openConnection();
try {
statement = conn.prepareStatement ( "INSERT INTO bucket_policies (BucketName, OwnerCanonicalID, Policy) VALUES (?,?,?)" );
statement.setString( 1, bucketName );
statement.setString( 2, owner );
statement.setString( 3, policy );
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
/**
* Since a bucket policy can exist before its bucket, we also need to keep the policy's owner
* so that we can restrict who modifies it (because of the "s3:CreateBucket" action).
*/
public String getPolicyOwner( String bucketName )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String owner = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT OwnerCanonicalID FROM bucket_policies WHERE BucketName=?" );
statement.setString( 1, bucketName );
ResultSet rs = statement.executeQuery();
if (rs.next()) owner = rs.getString( "OwnerCanonicalID" );
statement.close();
return owner;
} finally {
closeConnection();
}
}
public String getPolicy( String bucketName )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String policy = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT Policy FROM bucket_policies WHERE BucketName=?" );
statement.setString( 1, bucketName );
ResultSet rs = statement.executeQuery();
if (rs.next()) policy = rs.getString( "Policy" );
statement.close();
return policy;
} finally {
closeConnection();
}
}
public void deletePolicy( String bucketName )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
openConnection();
try {
statement = conn.prepareStatement ( "DELETE FROM bucket_policies WHERE BucketName=?" );
statement.setString( 1, bucketName );
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
private void openConnection()
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
if (null == conn) {
Class.forName( "com.mysql.jdbc.Driver" ).newInstance();
conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName, dbUser, dbPassword );
}
}
private void closeConnection() throws SQLException {
if (null != conn) conn.close();
conn = null;
}
}

View File

@@ -0,0 +1,72 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.BucketPolicyVO;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={BucketPolicyDao.class})
public class BucketPolicyDaoImpl extends GenericDaoBase<BucketPolicyVO, Long> implements BucketPolicyDao{
public static final Logger logger = Logger.getLogger(BucketPolicyDaoImpl.class);
public BucketPolicyDaoImpl(){ }
/**
* Since a bucket policy can exist before its bucket, we also need to keep the policy's owner
* so that we can restrict who modifies it (because of the "s3:CreateBucket" action).
*/
@Override
public BucketPolicyVO getByName( String bucketName ) {
SearchBuilder <BucketPolicyVO> searchByBucket = createSearchBuilder();
searchByBucket.and("BucketName", searchByBucket.entity().getBucketName(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<BucketPolicyVO> sc = searchByBucket.create();
sc.setParameters("BucketName", bucketName);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public void deletePolicy( String bucketName ) {
SearchBuilder <BucketPolicyVO> deleteByBucket = createSearchBuilder();
deleteByBucket.and("BucketName", deleteByBucket.entity().getBucketName(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<BucketPolicyVO> sc = deleteByBucket.create();
sc.setParameters("BucketName", bucketName);
remove(sc);
}finally {
txn.close();
}
}
}
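// Editor's note: an illustrative sketch, not part of this commit. It assumes a
// BucketPolicyDao instance has already been obtained (e.g. through the component locator
// or injection used elsewhere in the codebase) and that BucketPolicyVO exposes a
// getPolicy() accessor; only getByName() and deletePolicy() appear in this hunk.
import com.cloud.bridge.model.BucketPolicyVO;

public class BucketPolicyLookupExample {
    private final BucketPolicyDao bucketPolicyDao;

    public BucketPolicyLookupExample(BucketPolicyDao bucketPolicyDao) {
        this.bucketPolicyDao = bucketPolicyDao;
    }

    public String findPolicyText(String bucketName) {
        BucketPolicyVO policy = bucketPolicyDao.getByName(bucketName);
        return policy == null ? null : policy.getPolicy();   // getPolicy() is an assumption
    }

    public void dropPolicy(String bucketName) {
        bucketPolicyDao.deletePolicy(bucketName);
    }
}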

View File

@@ -1,35 +1,11 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.CloudStackAccountVO;
import com.cloud.utils.db.GenericDao;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.stack.models.CloudStackAccount;
public interface CloudStackAccountDao extends
GenericDao<CloudStackAccountVO, String> {
String getDefaultZoneId(String accountId);
public class CloudStackAccountDao extends EntityDao<CloudStackAccount> {
public static final Logger logger = Logger.getLogger(CloudStackAccountDao.class);
public CloudStackAccountDao() {
super(CloudStackAccount.class, true);
}
public CloudStackAccount getdefaultZoneId( String id ) {
return queryEntity("from CloudStackAccount where id=?", new Object[] {id});
}
}

View File

@@ -0,0 +1,39 @@
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import com.cloud.bridge.model.CloudStackAccountVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={CloudStackAccountDao.class})
public class CloudStackAccountDaoImpl extends GenericDaoBase<CloudStackAccountVO, String> implements CloudStackAccountDao {
@Override
public String getDefaultZoneId(String accountId) {
SearchBuilder<CloudStackAccountVO> SearchByUUID = createSearchBuilder();
Transaction txn = Transaction.open(Transaction.CLOUD_DB);
try {
txn.start();
SearchByUUID.and("uuid", SearchByUUID.entity().getUuid(),
SearchCriteria.Op.EQ);
SearchByUUID.done();
SearchCriteria<CloudStackAccountVO> sc = SearchByUUID.create();
sc.setParameters("uuid", accountId);
CloudStackAccountVO account = findOneBy(sc);
if (null != account && null != account.getDefaultZoneId())
return Long.toString(account.getDefaultZoneId());
return null;
} finally {
txn.commit();
txn.close();
}
}
}
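A similar hypothetical sketch for the account DAO, resolving an account's default zone with a caller-supplied fallback:
package com.cloud.bridge.persist.dao;

import com.cloud.utils.component.ComponentLocator;

public class DefaultZoneLookupSketch {
    private final CloudStackAccountDao accountDao = ComponentLocator.inject(CloudStackAccountDaoImpl.class);

    /** Returns the account's default zone id (as a string), or the supplied fallback when none is set. */
    public String defaultZoneOrFallback(String accountUuid, String fallbackZoneId) {
        String zoneId = accountDao.getDefaultZoneId(accountUuid);   // looks up the cloud DB account row by uuid
        return (null != zoneId) ? zoneId : fallbackZoneId;
    }
}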

View File

@ -1,42 +1,9 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import org.apache.log4j.Logger;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.stack.models.CloudStackConfiguration;
public class CloudStackConfigurationDao extends EntityDao<CloudStackConfiguration> {
public static final Logger logger = Logger.getLogger(CloudStackConfigurationDao.class);
public CloudStackConfigurationDao() {
super(CloudStackConfiguration.class, true);
}
public String getConfigValue( String configName ){
CloudStackConfiguration config = queryEntity("from CloudStackConfiguration where name=?", new Object[] {configName});
if(config != null){
return config.getValue();
}
return null;
}
import com.cloud.bridge.model.CloudStackConfigurationVO;
import com.cloud.utils.db.GenericDao;
public interface CloudStackConfigurationDao extends GenericDao<CloudStackConfigurationVO, String> {
public String getConfigValue(String name);
}

View File

@ -0,0 +1,45 @@
package com.cloud.bridge.persist.dao;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import javax.ejb.Local;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.CloudStackConfigurationVO;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={CloudStackConfigurationDao.class})
public class CloudStackConfigurationDaoImpl extends GenericDaoBase<CloudStackConfigurationVO, String> implements CloudStackConfigurationDao {
private static final Logger s_logger = Logger.getLogger(CloudStackConfigurationDaoImpl.class);
final SearchBuilder<CloudStackConfigurationVO> NameSearch= createSearchBuilder();
public CloudStackConfigurationDaoImpl() { }
@Override
@DB
public String getConfigValue(String name) {
NameSearch.and("name", NameSearch.entity().getName(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.currentTxn();
try {
txn.start();
SearchCriteria<CloudStackConfigurationVO> sc = NameSearch.create();
sc.setParameters("name", name);
CloudStackConfigurationVO config = findOneBy(sc);
return (null == config) ? null : config.getValue();
}finally {
}
}
}
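A hypothetical caller of the new configuration DAO, assuming the value is numeric and falling back to a default when the key is unset or malformed:
package com.cloud.bridge.persist.dao;

import com.cloud.utils.component.ComponentLocator;

public class ConfigLookupSketch {
    private final CloudStackConfigurationDao configDao = ComponentLocator.inject(CloudStackConfigurationDaoImpl.class);

    /** Reads an integer configuration value, falling back to a default when the key is unset or malformed. */
    public int intConfig(String name, int defaultValue) {
        String raw = configDao.getConfigValue(name);
        if (null == raw)
            return defaultValue;
        try {
            return Integer.parseInt(raw.trim());
        } catch (NumberFormatException e) {
            return defaultValue;
        }
    }
}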

View File

@ -1,42 +1,13 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.CloudStackServiceOfferingVO;
import com.cloud.utils.db.GenericDao;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.stack.models.CloudStackConfiguration;
import com.cloud.stack.models.CloudStackServiceOffering;
public interface CloudStackSvcOfferingDao extends GenericDao<CloudStackServiceOfferingVO, String>{
public CloudStackServiceOfferingVO getSvcOfferingByName(String name);
public class CloudStackSvcOfferingDao extends EntityDao<CloudStackServiceOffering> {
public static final Logger logger = Logger.getLogger(CloudStackSvcOfferingDao.class);
public CloudStackServiceOfferingVO getSvcOfferingById(String id);
public CloudStackSvcOfferingDao() {
super(CloudStackServiceOffering.class, true);
}
public CloudStackServiceOffering getSvcOfferingByName( String name ){
return queryEntity("from CloudStackServiceOffering where name=?", new Object[] {name});
}
public CloudStackServiceOffering getSvcOfferingById( String id ){
return queryEntity("from CloudStackServiceOffering where id=?", new Object[] {id});
}
}

View File

@ -0,0 +1,75 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.CloudStackServiceOfferingVO;
import com.cloud.bridge.model.SHostVO;
import com.cloud.stack.models.CloudStackConfiguration;
import com.cloud.stack.models.CloudStackServiceOffering;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={CloudStackSvcOfferingDao.class})
public class CloudStackSvcOfferingDaoImpl extends GenericDaoBase<CloudStackServiceOfferingVO, String> implements CloudStackSvcOfferingDao {
public static final Logger logger = Logger.getLogger(CloudStackSvcOfferingDaoImpl.class);
public CloudStackSvcOfferingDaoImpl() { }
@Override
public CloudStackServiceOfferingVO getSvcOfferingByName( String name ){
SearchBuilder <CloudStackServiceOfferingVO> searchByName = createSearchBuilder();
searchByName.and("name", searchByName.entity().getName(), SearchCriteria.Op.EQ);
searchByName.done();
Transaction txn = Transaction.open(Transaction.CLOUD_DB);
try {
txn.start();
SearchCriteria<CloudStackServiceOfferingVO> sc = searchByName.create();
sc.setParameters("name", name);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public CloudStackServiceOfferingVO getSvcOfferingById( String id ){
SearchBuilder <CloudStackServiceOfferingVO> searchByID = createSearchBuilder();
searchByID.and("id", searchByID.entity().getName(), SearchCriteria.Op.EQ);
searchByID.done();
Transaction txn = Transaction.open(Transaction.CLOUD_DB);
try {
txn.start();
SearchCriteria<CloudStackServiceOfferingVO> sc = searchByID.create();
sc.setParameters("id", id);
return findOneBy(sc);
}finally {
txn.close();
}
}
}
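A hypothetical lookup helper over the new service-offering DAO, trying the name first and then the id:
package com.cloud.bridge.persist.dao;

import com.cloud.bridge.model.CloudStackServiceOfferingVO;
import com.cloud.utils.component.ComponentLocator;

public class SvcOfferingLookupSketch {
    private final CloudStackSvcOfferingDao svcOfferingDao = ComponentLocator.inject(CloudStackSvcOfferingDaoImpl.class);

    /** Resolves a service offering by name first, then by id; returns null when neither matches. */
    public CloudStackServiceOfferingVO resolve(String nameOrId) {
        CloudStackServiceOfferingVO offering = svcOfferingDao.getSvcOfferingByName(nameOrId);
        if (null == offering)
            offering = svcOfferingDao.getSvcOfferingById(nameOrId);
        return offering;
    }
}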

View File

@ -1,30 +1,12 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import com.cloud.bridge.model.MHost;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.model.MHostVO;
import com.cloud.utils.db.GenericDao;
public interface MHostDao extends GenericDao<MHostVO, Long> {
MHostVO getByHostKey(String hostKey);
public void updateHeartBeat(MHostVO mhost);
public class MHostDao extends EntityDao<MHost> {
public MHostDao() {
super(MHost.class);
}
public MHost getByHostKey(String hostKey) {
return queryEntity("from MHost where hostKey=?", new Object[] {hostKey});
}
}

View File

@ -0,0 +1,61 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import com.cloud.bridge.model.MHostVO;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={MHostDao.class})
public class MHostDaoImpl extends GenericDaoBase<MHostVO, Long> implements MHostDao{
final SearchBuilder<MHostVO> NameSearch= createSearchBuilder();
public MHostDaoImpl() {
}
@DB
@Override
public MHostVO getByHostKey(String hostKey) {
NameSearch.and("MHostKey", NameSearch.entity().getHostKey(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MHostVO> sc = NameSearch.create();
sc.setParameters("MHostKey", hostKey);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public void updateHeartBeat(MHostVO mhost) {
Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
update(mhost.getId(), mhost);
txn.commit();
}finally {
txn.close();
}
}
}
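A hypothetical heartbeat refresher over the new MHostDao; the VO's heartbeat setter is not shown in this change, so that mutation is left as a comment:
package com.cloud.bridge.persist.dao;

import com.cloud.bridge.model.MHostVO;
import com.cloud.utils.component.ComponentLocator;

public class MHostHeartbeatSketch {
    private final MHostDao mhostDao = ComponentLocator.inject(MHostDaoImpl.class);

    /** Re-persists the row for a management host after the caller has refreshed its heartbeat fields. */
    public boolean touch(String hostKey) {
        MHostVO mhost = mhostDao.getByHostKey(hostKey);    // AWSAPI_DB lookup by MHostKey
        if (null == mhost)
            return false;
        // ...caller updates the heartbeat timestamp on mhost here (setter name not shown in this change)...
        mhostDao.updateHeartBeat(mhost);                   // update() inside its own transaction, then commit
        return true;
    }
}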

View File

@ -1,30 +1,11 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import com.cloud.bridge.model.MHostMount;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.model.MHostMountVO;
import com.cloud.utils.db.GenericDao;
public interface MHostMountDao extends GenericDao<MHostMountVO, Long> {
MHostMountVO getHostMount(long mHostId, long sHostId);
public class MHostMountDao extends EntityDao<MHostMount> {
public MHostMountDao() {
super(MHostMount.class);
}
public MHostMount getHostMount(long mHostId, long sHostId) {
return queryEntity("from MHostMount where mhost=? and shost=?", new Object[] { mHostId, sHostId } );
}
}

View File

@ -0,0 +1,48 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import com.cloud.bridge.model.MHostMountVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={MHostMountDao.class})
public class MHostMountDaoImpl extends GenericDaoBase<MHostMountVO, Long> implements MHostMountDao {
final SearchBuilder<MHostMountVO> SearchByMHostID = createSearchBuilder();
public MHostMountDaoImpl() {
}
@Override
public MHostMountVO getHostMount(long mHostId, long sHostId) {
SearchByMHostID.and("MHostID", SearchByMHostID.entity().getmHostID(), SearchCriteria.Op.EQ);
SearchByMHostID.and("SHostID", SearchByMHostID.entity().getsHostID(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<MHostMountVO> sc = SearchByMHostID.create();
sc.setParameters("MHostID", mHostId);
sc.setParameters("SHostID", sHostId);
return findOneBy(sc);
}finally {
txn.close();
}
}
}

View File

@ -0,0 +1,18 @@
package com.cloud.bridge.persist.dao;
import java.util.List;
import com.cloud.bridge.model.MultiPartPartsVO;
import com.cloud.utils.db.GenericDao;
public interface MultiPartPartsDao extends GenericDao<MultiPartPartsVO, Long> {
List<MultiPartPartsVO> getParts(int uploadId, int maxParts, int startAt);
int getnumParts(int uploadId, int endMarker);
MultiPartPartsVO findByUploadID(int uploadId, int partNumber);
void updateParts(MultiPartPartsVO partVO, int uploadId, int partNumber);
}

View File

@ -0,0 +1,101 @@
package com.cloud.bridge.persist.dao;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.MultiPartPartsVO;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={MultiPartPartsDao.class})
public class MultiPartPartsDaoImpl extends GenericDaoBase<MultiPartPartsVO, Long> implements MultiPartPartsDao {
@Override
public List<MultiPartPartsVO> getParts(int uploadId, int maxParts, int startAt ) {
SearchBuilder<MultiPartPartsVO> ByUploadID = createSearchBuilder();
ByUploadID.and("UploadID", ByUploadID.entity().getUploadid(), SearchCriteria.Op.EQ);
ByUploadID.and("partNumber", ByUploadID.entity().getPartNumber(), SearchCriteria.Op.GT);
ByUploadID.and("partNumber", ByUploadID.entity().getPartNumber(), SearchCriteria.Op.LT);
Filter filter = new Filter(MultiPartPartsVO.class, "partNumber", Boolean.TRUE, null, null);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MultiPartPartsVO> sc = ByUploadID.create();
sc.setParameters("UploadID", new Long(uploadId));
sc.setParameters("partNumber", startAt);
sc.setParameters("partNumber", maxParts);
return listBy(sc, filter);
} finally {
txn.close();
}
}
@Override
public int getnumParts( int uploadId, int endMarker ) {
SearchBuilder<MultiPartPartsVO> byUploadID = createSearchBuilder();
byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ);
byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.GT);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MultiPartPartsVO> sc = byUploadID.create();
sc.setParameters("UploadID", new Long(uploadId));
sc.setParameters("partNumber", endMarker);
return listBy(sc).size();
} finally {
txn.close();
}
}
@Override
public MultiPartPartsVO findByUploadID(int uploadId, int partNumber) {
SearchBuilder<MultiPartPartsVO> byUploadID = createSearchBuilder();
byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ);
byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MultiPartPartsVO> sc = byUploadID.create();
sc.setParameters("UploadID", new Long(uploadId));
sc.setParameters("partNumber", partNumber);
return findOneBy(sc);
} finally {
txn.close();
}
}
@Override
public void updateParts(MultiPartPartsVO partVO, int uploadId, int partNumber) {
SearchBuilder<MultiPartPartsVO> byUploadID = createSearchBuilder();
byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ);
byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MultiPartPartsVO> sc = byUploadID.create();
sc.setParameters("UploadID", new Long(uploadId));
sc.setParameters("partNumber", partNumber);
update(partVO, sc);
txn.commit();
} finally {
txn.close();
}
}
}

View File

@ -0,0 +1,21 @@
package com.cloud.bridge.persist.dao;
import java.util.List;
import com.cloud.bridge.model.MultiPartUploadsVO;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.utils.db.GenericDao;
public interface MultiPartUploadsDao extends
GenericDao<MultiPartUploadsVO, Long> {
OrderedPair<String, String> multipartExits(int uploadId);
void deleteUpload(int uploadId);
String getAtrributeValue(String attribute, int uploadid);
List<MultiPartUploadsVO> getInitiatedUploads(String bucketName,
int maxParts, String prefix, String keyMarker, String uploadIdMarker);
}

View File

@ -0,0 +1,108 @@
package com.cloud.bridge.persist.dao;
import java.util.ArrayList;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.MultiPartPartsVO;
import com.cloud.bridge.model.MultiPartUploadsVO;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.utils.db.Attribute;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={MultiPartUploadsDao.class})
public class MultiPartUploadsDaoImpl extends GenericDaoBase<MultiPartUploadsVO, Long> implements MultiPartUploadsDao {
@Override
public OrderedPair<String,String> multipartExits( int uploadId ) {
MultiPartUploadsVO uploadvo = null;
Transaction txn = null;
try {
txn = Transaction.open(Transaction.AWSAPI_DB);
uploadvo = findById(new Long(uploadId));
if (null != uploadvo)
return new OrderedPair<String,String>(uploadvo.getAccessKey(), uploadvo.getNameKey());
return null;
} finally {
txn.close();
}
}
@Override
public void deleteUpload(int uploadId) {
Transaction txn = null;
try {
txn = Transaction.open(Transaction.AWSAPI_DB);
remove(new Long(uploadId));
txn.commit();
}finally {
txn.close();
}
}
@Override
public String getAtrributeValue(String attribute, int uploadid) {
Transaction txn = null;
MultiPartUploadsVO uploadvo = null;
try {
txn = Transaction.open(Transaction.AWSAPI_DB);
uploadvo = findById(new Long(uploadid));
if (null != uploadvo) {
if ( attribute.equalsIgnoreCase("AccessKey") )
return uploadvo.getAccessKey();
else if ( attribute.equalsIgnoreCase("x_amz_acl") )
return uploadvo.getAmzAcl();
}
return null;
} finally {
txn.close();
}
}
@Override
public List<MultiPartUploadsVO> getInitiatedUploads(String bucketName, int maxParts, String prefix, String keyMarker, String uploadIdMarker) {
List<MultiPartUploadsVO> uploadList = new ArrayList<MultiPartUploadsVO>();
SearchBuilder<MultiPartUploadsVO> byBucket = createSearchBuilder();
byBucket.and("BucketName", byBucket.entity().getBucketName() , SearchCriteria.Op.EQ);
if (null != prefix)
byBucket.and("NameKeyLike", byBucket.entity().getNameKey(), SearchCriteria.Op.LIKE);
if (null != keyMarker)
byBucket.and("NameKeyAfter", byBucket.entity().getNameKey(), SearchCriteria.Op.GT);
if (null != uploadIdMarker)
byBucket.and("ID", byBucket.entity().getId(), SearchCriteria.Op.GT);
Filter filter = new Filter(MultiPartUploadsVO.class, "nameKey", Boolean.TRUE, null, null);
filter.addOrderBy(MultiPartUploadsVO.class, "createTime", Boolean.TRUE);
Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<MultiPartUploadsVO> sc = byBucket.create();
sc.setParameters("BucketName", bucketName);
// -> SQL LIKE needs the '%' wildcard appended to the prefix
if (null != prefix)
sc.setParameters("NameKeyLike", prefix + "%");
if (null != keyMarker)
sc.setParameters("NameKeyAfter", keyMarker);
if (null != uploadIdMarker)
sc.setParameters("ID", uploadIdMarker);
uploadList = listBy(sc, filter);
}finally {
txn.close();
}
return uploadList;
}
}
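A hypothetical ownership check over the new uploads DAO, reusing the getAtrributeValue accessor exactly as spelled in this commit:
package com.cloud.bridge.persist.dao;

import com.cloud.utils.component.ComponentLocator;

public class UploadOwnershipSketch {
    private final MultiPartUploadsDao uploadsDao = ComponentLocator.inject(MultiPartUploadsDaoImpl.class);

    /** True when the multipart upload exists and was initiated by the given access key. */
    public boolean isOwnedBy(int uploadId, String accessKey) {
        String initiator = uploadsDao.getAtrributeValue("AccessKey", uploadId);   // DAO method name as spelled in this commit
        return null != initiator && initiator.equals(accessKey);
    }
}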

View File

@ -34,42 +34,25 @@ import java.util.Properties;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.MultiPartPartsVO;
import com.cloud.bridge.model.MultiPartUploadsVO;
import com.cloud.bridge.model.MultipartMetaVO;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.bridge.service.core.s3.S3MultipartPart;
import com.cloud.bridge.service.core.s3.S3MultipartUpload;
import com.cloud.bridge.util.ConfigurationHelper;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;
public class MultipartLoadDao {
public static final Logger logger = Logger.getLogger(MultipartLoadDao.class);
private Connection conn = null;
private String dbName = null;
private String dbUser = null;
private String dbPassword = null;
private String dbHost = null;
private String dbPort = null;
public MultipartLoadDao() {
File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties");
Properties EC2Prop = null;
if (null != propertiesFile) {
EC2Prop = new Properties();
try {
EC2Prop.load( new FileInputStream( propertiesFile ));
} catch (FileNotFoundException e) {
logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e);
} catch (IOException e) {
logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e);
}
dbHost = EC2Prop.getProperty( "db.cloud.host" );
dbName = EC2Prop.getProperty( "db.awsapi.name" );
dbUser = EC2Prop.getProperty( "db.cloud.username" );
dbPassword = EC2Prop.getProperty( "db.cloud.password" );
dbPort = EC2Prop.getProperty( "db.cloud.port" );
}
}
protected final MultipartMetaDao mpartMetaDao = ComponentLocator.inject(MultipartMetaDaoImpl.class);
protected final MultiPartPartsDao mpartPartsDao = ComponentLocator.inject(MultiPartPartsDaoImpl.class);
protected final MultiPartUploadsDao mpartUploadDao = ComponentLocator.inject(MultiPartUploadsDaoImpl.class);
public MultipartLoadDao() {}
/**
* If a multipart upload exists with the uploadId value then return the non-null creators
@ -77,30 +60,13 @@ public class MultipartLoadDao {
*
* @param uploadId
* @return creator of the multipart upload, and NameKey of upload
* @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException
*/
public OrderedPair<String,String> multipartExits( int uploadId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String accessKey = null;
String nameKey = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT AccessKey, NameKey FROM multipart_uploads WHERE ID=?" );
statement.setInt( 1, uploadId );
ResultSet rs = statement.executeQuery();
if ( rs.next()) {
accessKey = rs.getString( "AccessKey" );
nameKey = rs.getString( "NameKey" );
return new OrderedPair<String,String>( accessKey, nameKey );
}
else return null;
} finally {
closeConnection();
}
return mpartUploadDao.multipartExits(uploadId);
}
/**
@ -110,23 +76,9 @@ public class MultipartLoadDao {
*
* @param uploadId
*
* @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException
*/
public void deleteUpload( int uploadId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
openConnection();
try {
statement = conn.prepareStatement ( "DELETE FROM multipart_uploads WHERE ID=?" );
statement.setInt( 1, uploadId );
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
public void deleteUpload( int uploadId ) {
mpartUploadDao.deleteUpload(uploadId);
}
/**
@ -134,26 +86,9 @@ public class MultipartLoadDao {
*
* @param uploadId
* @return the access key value defining the initiator
* @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
*/
public String getInitiator( int uploadId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String initiator = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT AccessKey FROM multipart_uploads WHERE ID=?" );
statement.setInt( 1, uploadId );
ResultSet rs = statement.executeQuery();
if (rs.next()) initiator = rs.getString( "AccessKey" );
statement.close();
return initiator;
} finally {
closeConnection();
}
public String getInitiator( int uploadId ) {
return mpartUploadDao.getAtrributeValue("AccessKey", uploadId);
}
/**
@ -165,47 +100,38 @@ public class MultipartLoadDao {
* @param cannedAccess
*
* @return if positive its the uploadId to be returned to the client
*
* @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException
*
*/
public int initiateUpload( String accessKey, String bucketName, String key, String cannedAccess, S3MetaDataEntry[] meta )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
public int initiateUpload( String accessKey, String bucketName, String key, String cannedAccess, S3MetaDataEntry[] meta ) {
int uploadId = -1;
openConnection();
Transaction txn = null;
try {
Date tod = new Date();
java.sql.Timestamp dateTime = new Timestamp( tod.getTime());
txn = Transaction.open(Transaction.AWSAPI_DB);
Date tod = new Date();
MultiPartUploadsVO uploadVO = new MultiPartUploadsVO(accessKey,
bucketName, key, cannedAccess, tod);
uploadVO = mpartUploadDao.persist(uploadVO);
statement = conn.prepareStatement ( "INSERT INTO multipart_uploads (AccessKey, BucketName, NameKey, x_amz_acl, CreateTime) VALUES (?,?,?,?,?)" );
statement.setString( 1, accessKey );
statement.setString( 2, bucketName );
statement.setString( 3, key );
statement.setString( 4, cannedAccess );
statement.setTimestamp( 5, dateTime );
int count = statement.executeUpdate();
statement.close();
if (null != uploadVO) {
uploadId = uploadVO.getId().intValue();
if (null != meta) {
for (int i = 0; i < meta.length; i++) {
MultipartMetaVO mpartMeta = new MultipartMetaVO();
mpartMeta.setUploadID(uploadId);
S3MetaDataEntry entry = meta[i];
mpartMeta.setName(entry.getName());
mpartMeta.setValue(entry.getValue());
mpartMetaDao.persist(mpartMeta);
}
}
txn.commit();
}
// -> we need the newly entered ID
statement = conn.prepareStatement ( "SELECT ID FROM multipart_uploads WHERE AccessKey=? AND BucketName=? AND NameKey=? AND CreateTime=?" );
statement.setString( 1, accessKey );
statement.setString( 2, bucketName );
statement.setString( 3, key );
statement.setTimestamp( 4, dateTime );
ResultSet rs = statement.executeQuery();
if (rs.next()) {
uploadId = rs.getInt( "ID" );
saveMultipartMeta( uploadId, meta );
}
statement.close();
return uploadId;
} finally {
closeConnection();
txn.close();
}
}
}
/**
* Remember all the individual parts that make up the entire multipart upload so that once
@ -219,49 +145,28 @@ public class MultipartLoadDao {
* @param size
* @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
*/
public void savePart( int uploadId, int partNumber, String md5, String storedPath, int size )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
int id = -1;
int count = 0;
openConnection();
public void savePart( int uploadId, int partNumber, String md5, String storedPath, int size ) {
try {
Date tod = new Date();
java.sql.Timestamp dateTime = new java.sql.Timestamp( tod.getTime());
MultiPartPartsVO partVO = null;
// -> are we doing an update or an insert? (are we over writting an existing entry?)
statement = conn.prepareStatement ( "SELECT ID FROM multipart_parts WHERE UploadID=? AND partNumber=?" );
statement.setInt( 1, uploadId );
statement.setInt( 2, partNumber );
ResultSet rs = statement.executeQuery();
if (rs.next()) id = rs.getInt( "ID" );
statement.close();
partVO = mpartPartsDao.findByUploadID(uploadId, partNumber);
// -> are we doing an update or an insert? (are we overwriting an
// existing entry?)
if ( -1 == id )
{
statement = conn.prepareStatement ( "INSERT INTO multipart_parts (UploadID, partNumber, MD5, StoredPath, StoredSize, CreateTime) VALUES (?,?,?,?,?,?)" );
statement.setInt( 1, uploadId );
statement.setInt( 2, partNumber );
statement.setString( 3, md5 );
statement.setString( 4, storedPath );
statement.setInt( 5, size );
statement.setTimestamp( 6, dateTime );
if (null == partVO) {
MultiPartPartsVO part = new MultiPartPartsVO(uploadId,
partNumber, md5, storedPath, size, new Date());
mpartPartsDao.persist(part);
} else {
partVO.setMd5(md5);
partVO.setStoredSize(new Long(size));
partVO.setCreateTime(new Date());
partVO.setUploadid(new Long(uploadId));
partVO.setPartNumber(partNumber);
mpartPartsDao.updateParts(partVO, uploadId, partNumber);
}
else
{ statement = conn.prepareStatement ( "UPDATE multipart_parts SET MD5=?, StoredSize=?, CreateTime=? WHERE UploadId=? AND partNumber=?" );
statement.setString( 1, md5 );
statement.setInt( 2, size );
statement.setTimestamp( 3, dateTime );
statement.setInt( 4, uploadId );
statement.setInt( 5, partNumber );
}
count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
@ -270,24 +175,8 @@ public class MultipartLoadDao {
* @param uploadId
* @return the value defined in the x-amz-acl header or null
*/
public String getCannedAccess( int uploadId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String access = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT x_amz_acl FROM multipart_uploads WHERE ID=?" );
statement.setInt( 1, uploadId );
ResultSet rs = statement.executeQuery();
if (rs.next()) access = rs.getString( "x_amz_acl" );
statement.close();
return access;
} finally {
closeConnection();
}
public String getCannedAccess( int uploadId ) {
return mpartUploadDao.getAtrributeValue("x_amz_acl", uploadId);
}
/**
@ -302,31 +191,25 @@ public class MultipartLoadDao {
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
List<S3MetaDataEntry> metaList = new ArrayList<S3MetaDataEntry>();
PreparedStatement statement = null;
int count = 0;
openConnection();
List<MultipartMetaVO> metaVO;
try {
statement = conn.prepareStatement ( "SELECT Name, Value FROM multipart_meta WHERE UploadID=?" );
statement.setInt( 1, uploadId );
ResultSet rs = statement.executeQuery();
while (rs.next())
{
S3MetaDataEntry oneMeta = new S3MetaDataEntry();
oneMeta.setName( rs.getString( "Name" ));
oneMeta.setValue( rs.getString( "Value" ));
metaList.add( oneMeta );
count++;
}
statement.close();
metaVO = mpartMetaDao.getByUploadID(uploadId);
for (MultipartMetaVO multipartMetaVO : metaVO) {
S3MetaDataEntry oneMeta = new S3MetaDataEntry();
oneMeta.setName( multipartMetaVO.getName());
oneMeta.setValue( multipartMetaVO.getValue());
metaList.add( oneMeta );
count++;
}
if ( 0 == count )
return null;
else return metaList.toArray(new S3MetaDataEntry[0]);
} finally {
closeConnection();
}
}
@ -346,52 +229,33 @@ public class MultipartLoadDao {
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
S3MultipartUpload[] inProgress = new S3MultipartUpload[maxParts];
PreparedStatement statement = null;
boolean isTruncated = false;
int i = 0;
int pos = 1;
List<MultiPartUploadsVO> uploadList;
// -> SQL like condition requires the '%' as a wildcard marker
if (null != prefix) prefix = prefix + "%";
StringBuffer queryStr = new StringBuffer();
queryStr.append( "SELECT ID, AccessKey, NameKey, CreateTime FROM multipart_uploads WHERE BucketName=? " );
if (null != prefix ) queryStr.append( "AND NameKey like ? " );
if (null != keyMarker ) queryStr.append( "AND NameKey > ? ");
if (null != uploadIdMarker) queryStr.append( "AND ID > ? " );
queryStr.append( "ORDER BY NameKey, CreateTime" );
openConnection();
try {
statement = conn.prepareStatement ( queryStr.toString());
statement.setString( pos++, bucketName );
if (null != prefix ) statement.setString( pos++, prefix );
if (null != keyMarker ) statement.setString( pos++, keyMarker );
if (null != uploadIdMarker) statement.setString( pos, uploadIdMarker );
ResultSet rs = statement.executeQuery();
while (rs.next() && i < maxParts)
{
Calendar tod = Calendar.getInstance();
tod.setTime( rs.getTimestamp( "CreateTime" ));
inProgress[i] = new S3MultipartUpload();
inProgress[i].setId( rs.getInt( "ID" ));
inProgress[i].setAccessKey( rs.getString( "AccessKey" ));
inProgress[i].setLastModified( tod );
inProgress[i].setBucketName( bucketName );
inProgress[i].setKey( rs.getString( "NameKey" ));
i++;
}
if (rs.next()) isTruncated = true;
statement.close();
if (i < maxParts) inProgress = (S3MultipartUpload[])resizeArray(inProgress,i);
return new OrderedPair<S3MultipartUpload[], Boolean>(inProgress, isTruncated);
} finally {
closeConnection();
try {
uploadList = mpartUploadDao.getInitiatedUploads(bucketName, maxParts, prefix, keyMarker, uploadIdMarker);
for (MultiPartUploadsVO uploadsVO : uploadList) {
if (i >= maxParts) { isTruncated = true; break; }
Calendar tod = Calendar.getInstance();
tod.setTime(uploadsVO.getCreateTime());
inProgress[i] = new S3MultipartUpload();
inProgress[i].setId( uploadsVO.getId().intValue());
inProgress[i].setAccessKey(uploadsVO.getAccessKey());
inProgress[i].setLastModified( tod );
inProgress[i].setBucketName( bucketName );
inProgress[i].setKey(uploadsVO.getNameKey());
i++;
}
if (i < maxParts)
inProgress = (S3MultipartUpload[]) resizeArray(inProgress, i);
return new OrderedPair<S3MultipartUpload[], Boolean>(inProgress,
isTruncated);
}finally {
}
}
@ -411,41 +275,30 @@ public class MultipartLoadDao {
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
S3MultipartPart[] parts = new S3MultipartPart[maxParts];
PreparedStatement statement = null;
int i = 0;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT partNumber, MD5, StoredSize, StoredPath, CreateTime " +
"FROM multipart_parts " +
"WHERE UploadID=? " +
"AND partNumber > ? AND partNumber < ? " +
"ORDER BY partNumber" );
statement.setInt( 1, uploadId );
statement.setInt( 2, startAt );
statement.setInt( 3, startAt + maxParts + 1 );
ResultSet rs = statement.executeQuery();
while (rs.next() && i < maxParts)
{
Calendar tod = Calendar.getInstance();
tod.setTime( rs.getTimestamp( "CreateTime" ));
parts[i] = new S3MultipartPart();
parts[i].setPartNumber( rs.getInt( "partNumber" ));
parts[i].setEtag( rs.getString( "MD5" ).toLowerCase());
parts[i].setLastModified( tod );
parts[i].setSize( rs.getInt( "StoredSize" ));
parts[i].setPath( rs.getString( "StoredPath" ));
i++;
}
statement.close();
List<MultiPartPartsVO> partsVO;
try {
partsVO = mpartPartsDao.getParts(uploadId, startAt + maxParts + 1, startAt);
for (MultiPartPartsVO partVO : partsVO) {
Calendar tod = Calendar.getInstance();
tod.setTime(partVO.getCreateTime());
parts[i] = new S3MultipartPart();
parts[i].setPartNumber(partVO.getPartNumber());
parts[i].setEtag(partVO.getMd5());
parts[i].setLastModified(tod);
parts[i].setSize(partVO.getStoredSize().intValue());
parts[i].setPath(partVO.getStoredPath());
i++;
}
if (i < maxParts) parts = (S3MultipartPart[])resizeArray(parts,i);
return parts;
} finally {
closeConnection();
}
}
@ -457,25 +310,8 @@ public class MultipartLoadDao {
* @return number of parts with partNumber greater than endMarker
* @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
*/
public int numParts( int uploadId, int endMarker )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
int count = 0;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT count(*) FROM multipart_parts WHERE UploadID=? AND partNumber > ?" );
statement.setInt( 1, uploadId );
statement.setInt( 2, endMarker );
ResultSet rs = statement.executeQuery();
if (rs.next()) count = rs.getInt( 1 );
statement.close();
return count;
} finally {
closeConnection();
}
public int numParts( int uploadId, int endMarker ) {
return mpartPartsDao.getnumParts(uploadId, endMarker);
}
/**
@ -485,46 +321,30 @@ public class MultipartLoadDao {
* @param uploadId - defines an in-process multipart upload
* @param meta - an array of meta data to be associated with the uploadId value
*
* @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException
*/
private void saveMultipartMeta( int uploadId, S3MetaDataEntry[] meta )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
if (null == meta) return;
PreparedStatement statement = null;
private void saveMultipartMeta( int uploadId, S3MetaDataEntry[] meta ) {
if (null == meta) return;
openConnection();
Transaction txn = null;
try {
txn = Transaction.open(Transaction.AWSAPI_DB);
for( int i=0; i < meta.length; i++ )
{
S3MetaDataEntry entry = meta[i];
statement = conn.prepareStatement ( "INSERT INTO multipart_meta (UploadID, Name, Value) VALUES (?,?,?)" );
statement.setInt( 1, uploadId );
statement.setString( 2, entry.getName());
statement.setString( 3, entry.getValue());
int count = statement.executeUpdate();
statement.close();
MultipartMetaVO metaVO = new MultipartMetaVO();
metaVO.setUploadID(uploadId);
metaVO.setName(entry.getName());
metaVO.setValue(entry.getValue());
metaVO=mpartMetaDao.persist(metaVO);
}
txn.commit();
} finally {
closeConnection();
txn.close();
}
}
private void openConnection()
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
if (null == conn) {
Class.forName( "com.mysql.jdbc.Driver" ).newInstance();
conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName, dbUser, dbPassword );
}
}
private void closeConnection() throws SQLException {
if (null != conn) conn.close();
conn = null;
}
/**
* Reallocates an array with a new size, and copies the contents
* of the old array to the new array.
*

View File

@ -0,0 +1,12 @@
package com.cloud.bridge.persist.dao;
import java.util.List;
import com.cloud.bridge.model.MultipartMetaVO;
import com.cloud.utils.db.GenericDao;
public interface MultipartMetaDao extends GenericDao<MultipartMetaVO, Long> {
List<MultipartMetaVO> getByUploadID(long uploadID);
}

View File

@ -0,0 +1,34 @@
package com.cloud.bridge.persist.dao;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.MultipartMetaVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={MultipartMetaDao.class})
public class MultipartMetaDaoImpl extends GenericDaoBase<MultipartMetaVO, Long> implements MultipartMetaDao {
@Override
public List<MultipartMetaVO> getByUploadID (long uploadID) {
SearchBuilder <MultipartMetaVO> searchByUID = createSearchBuilder();
searchByUID.and("UploadID", searchByUID.entity().getUploadID(), SearchCriteria.Op.EQ);
searchByUID.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<MultipartMetaVO> sc = searchByUID.create();
sc.setParameters("UploadID", uploadID);
return listBy(sc);
}finally {
txn.close();
}
}
}

View File

@ -1,169 +1,18 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Properties;
import com.cloud.bridge.model.OfferingBundleVO;
import com.cloud.utils.db.GenericDao;
import org.apache.log4j.Logger;
public interface OfferingDao extends GenericDao<OfferingBundleVO, Long> {
import com.cloud.bridge.util.ConfigurationHelper;
int getOfferingCount();
String getCloudOffering(String amazonEC2Offering);
public class OfferingDao extends BaseDao {
public static final Logger logger = Logger.getLogger(OfferingDao.class);
String getAmazonOffering(String cloudStackOffering);
private Connection conn = null;
public OfferingDao()
{
}
public int getOfferingCount()
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
int result = 0;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT count(*) FROM offering_bundle" );
ResultSet rs = statement.executeQuery();
if (rs.next()) result = rs.getInt(1);
statement.close();
return result;
} finally {
closeConnection();
}
}
public String getCloudOffering( String amazonEC2Offering )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String result = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT CloudStackOffering FROM offering_bundle WHERE AmazonEC2Offering=?" );
statement.setString( 1, amazonEC2Offering );
ResultSet rs = statement.executeQuery();
if (rs.next()) result = rs.getString( "CloudStackOffering" );
statement.close();
return result;
} finally {
closeConnection();
}
}
public String getAmazonOffering( String cloudStackOffering )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
String result = null;
openConnection();
try {
statement = conn.prepareStatement ( "SELECT AmazonEC2Offering FROM offering_bundle WHERE CloudStackOffering=?" );
statement.setString( 1, cloudStackOffering );
ResultSet rs = statement.executeQuery();
if (rs.next()) result = rs.getString( "AmazonEC2Offering" );
statement.close();
return result;
} finally {
closeConnection();
}
}
public void setOfferMapping( String amazonEC2Offering, String cloudStackOffering )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
int id = -1;
int count = 0;
void setOfferMapping(String amazonEC2Offering, String cloudStackOffering);
openConnection();
try {
// -> are we doing an update or an insert? (are we over writing an existing entry?)
statement = conn.prepareStatement ( "SELECT ID FROM offering_bundle WHERE AmazonEC2Offering=?" );
statement.setString( 1, amazonEC2Offering );
ResultSet rs = statement.executeQuery();
if (rs.next()) id = rs.getInt( "ID" );
statement.close();
void deleteOfferMapping(String amazonEC2Offering);
if ( -1 == id )
{
statement = conn.prepareStatement ( "INSERT INTO offering_bundle (AmazonEC2Offering, CloudStackOffering) VALUES (?,?)" );
statement.setString( 1, amazonEC2Offering );
statement.setString( 2, cloudStackOffering );
}
else
{ statement = conn.prepareStatement ( "UPDATE offering_bundle SET CloudStackOffering=? WHERE AmazonEC2Offering=?" );
statement.setString( 1, cloudStackOffering );
statement.setString( 2, amazonEC2Offering );
}
count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
public void deleteOfferMapping( String amazonEC2Offering )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
PreparedStatement statement = null;
openConnection();
try {
statement = conn.prepareStatement ( "DELETE FROM offering_bundle WHERE AmazonEC2Offering=?" );
statement.setString( 1, amazonEC2Offering );
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
private void openConnection()
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException
{
if (null == conn) {
Class.forName( "com.mysql.jdbc.Driver" ).newInstance();
conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + awsapi_dbName, dbUser, dbPassword );
}
}
private void closeConnection() throws SQLException
{
if (null != conn) conn.close();
conn = null;
}
}

View File

@ -0,0 +1,135 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import javax.persistence.Entity;
import javax.persistence.Table;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.OfferingBundleVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={OfferingDao.class})
public class OfferingDaoImpl extends GenericDaoBase<OfferingBundleVO, Long> implements OfferingDao {
public static final Logger logger = Logger.getLogger(OfferingDaoImpl.class);
public OfferingDaoImpl() {}
@Override
public int getOfferingCount() {
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
return listAll().size();
}finally {
txn.close();
}
}
@Override
public String getCloudOffering( String amazonEC2Offering ) {
SearchBuilder <OfferingBundleVO> searchByAmazon = createSearchBuilder();
searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ);
searchByAmazon.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<OfferingBundleVO> sc = searchByAmazon.create();
sc.setParameters("AmazonEC2Offering", amazonEC2Offering);
OfferingBundleVO offering = findOneBy(sc);
return (null == offering) ? null : offering.getCloudstackOffering();
} finally {
txn.close();
}
}
@Override
public String getAmazonOffering( String cloudStackOffering ) {
SearchBuilder <OfferingBundleVO> searchByAmazon = createSearchBuilder();
searchByAmazon.and("CloudStackOffering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ);
searchByAmazon.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<OfferingBundleVO> sc = searchByAmazon.create();
sc.setParameters("CloudStackOffering", cloudStackOffering);
OfferingBundleVO offering = findOneBy(sc);
return (null == offering) ? null : offering.getAmazonOffering();
} finally {
txn.close();
}
}
@Override
public void setOfferMapping( String amazonEC2Offering, String cloudStackOffering ) {
SearchBuilder <OfferingBundleVO> searchByAmazon = createSearchBuilder();
searchByAmazon.and("CloudStackOffering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ);
searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getCloudstackOffering() , SearchCriteria.Op.EQ);
searchByAmazon.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
OfferingBundleVO offering = null;
try {
txn.start();
SearchCriteria<OfferingBundleVO> sc = searchByAmazon.create();
sc.setParameters("CloudStackOffering", cloudStackOffering);
sc.setParameters("AmazonEC2Offering", amazonEC2Offering);
offering = findOneBy(sc);
boolean isNew = (null == offering);
if (isNew) {
offering = new OfferingBundleVO();
}
offering.setAmazonOffering(amazonEC2Offering);
offering.setCloudstackOffering(cloudStackOffering);
if (isNew)
offering = persist(offering);
else
update(offering.getID(), offering);
txn.commit();
} finally {
txn.close();
}
}
@Override
public void deleteOfferMapping( String amazonEC2Offering ) {
SearchBuilder <OfferingBundleVO> searchByAmazon = createSearchBuilder();
searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ);
searchByAmazon.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<OfferingBundleVO> sc = searchByAmazon.create();
sc.setParameters("AmazonEC2Offering", amazonEC2Offering);
remove(sc);
txn.commit();
} finally {
txn.close();
}
}
}
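A hypothetical mapping helper over the new OfferingDao, registering a caller-supplied default when no EC2-to-CloudStack mapping exists yet:
package com.cloud.bridge.persist.dao;

import com.cloud.utils.component.ComponentLocator;

public class OfferingMappingSketch {
    private final OfferingDao offeringDao = ComponentLocator.inject(OfferingDaoImpl.class);

    /** Returns the CloudStack offering mapped to an EC2 offering, creating the mapping from a default when absent. */
    public String cloudOfferingFor(String amazonEC2Offering, String defaultCloudOffering) {
        String mapped = offeringDao.getCloudOffering(amazonEC2Offering);
        if (null != mapped)
            return mapped;
        offeringDao.setOfferMapping(amazonEC2Offering, defaultCloudOffering);
        return defaultCloudOffering;
    }
}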

View File

@ -1,76 +1,21 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.Date;
import java.util.List;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.model.SAclVO;
import com.cloud.bridge.service.core.s3.S3AccessControlList;
import com.cloud.bridge.service.core.s3.S3Grant;
import com.cloud.utils.db.GenericDao;
public class SAclDao extends EntityDao<SAcl> {
public SAclDao() {
super(SAcl.class);
}
public List<SAcl> listGrants(String target, long targetId) {
return queryEntities("from SAcl where target=? and targetId=? order by grantOrder asc",
new Object[] { target, new Long(targetId)});
}
public interface SAclDao extends GenericDao<SAclVO, Long> {
public List<SAcl> listGrants(String target, long targetId, String userCanonicalId) {
return queryEntities("from SAcl where target=? and targetId=? and granteeCanonicalId=? order by grantOrder asc",
new Object[] { target, new Long(targetId), userCanonicalId });
}
List<SAclVO> listGrants(String target, long targetId, String userCanonicalId);
void save(String target, long targetId, S3AccessControlList acl);
SAcl save(String target, long targetId, S3Grant grant, int grantOrder);
List<SAclVO> listGrants(String target, long targetId);
public void save(String target, long targetId, S3AccessControlList acl) {
// -> the target's ACLs are being redefined
executeUpdate("delete from SAcl where target=? and targetId=?", new Object[] { target, new Long(targetId)});
if(acl != null) {
S3Grant[] grants = acl.getGrants();
if(grants != null && grants.length > 0) {
int grantOrder = 1;
for(S3Grant grant : grants) {
save(target, targetId, grant, grantOrder++);
}
}
}
}
public SAcl save(String target, long targetId, S3Grant grant, int grantOrder) {
SAcl aclEntry = new SAcl();
aclEntry.setTarget(target);
aclEntry.setTargetId(targetId);
aclEntry.setGrantOrder(grantOrder);
int grantee = grant.getGrantee();
aclEntry.setGranteeType(grantee);
aclEntry.setPermission(grant.getPermission());
aclEntry.setGranteeCanonicalId(grant.getCanonicalUserID());
Date ts = new Date();
aclEntry.setCreateTime(ts);
aclEntry.setLastModifiedTime(ts);
PersistContext.getSession().save(aclEntry);
return aclEntry;
}
}

View File

@ -0,0 +1,127 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.Date;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.model.SAclVO;
import com.cloud.bridge.service.core.s3.S3AccessControlList;
import com.cloud.bridge.service.core.s3.S3Grant;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SAclDao.class})
public class SAclDaoImpl extends GenericDaoBase<SAclVO, Long> implements SAclDao {
public SAclDaoImpl() {}
@Override
public List<SAclVO> listGrants(String target, long targetId) {
SearchBuilder<SAclVO> SearchByTarget = createSearchBuilder();
SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ);
SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ);
SearchByTarget.done();
Filter filter = new Filter(SAclVO.class, "grantOrder", Boolean.TRUE, null, null);
Transaction txn = Transaction.open( Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SAclVO> sc = SearchByTarget.create();
sc.setParameters("Target", target);
sc.setParameters("TargetID", targetId);
return listBy(sc, filter);
} finally {
txn.close();
}
}
@Override
public List<SAclVO> listGrants(String target, long targetId, String userCanonicalId) {
SearchBuilder<SAclVO> SearchByAcl = createSearchBuilder();
SearchByAcl.and("Target", SearchByAcl.entity().getTarget(), SearchCriteria.Op.EQ);
SearchByAcl.and("TargetID", SearchByAcl.entity().getTargetId(), SearchCriteria.Op.EQ);
SearchByAcl.and("GranteeCanonicalID", SearchByAcl.entity().getGranteeCanonicalId(), SearchCriteria.Op.EQ);
Filter filter = new Filter(SAclVO.class, "grantOrder", Boolean.TRUE, null, null);
Transaction txn = Transaction.open( Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SAclVO> sc = SearchByAcl.create();
sc.setParameters("Target", target);
sc.setParameters("TargetID", targetId);
sc.setParameters("GranteeCanonicalID", userCanonicalId);
return listBy(sc, filter);
} finally {
txn.close();
}
}
@Override
public void save(String target, long targetId, S3AccessControlList acl) {
SearchBuilder<SAclVO> SearchByTarget = createSearchBuilder();
SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ);
SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SAclVO> sc = SearchByTarget.create();
sc.setParameters("Target", target);
sc.setParameters("TargetID", targetId);
this.remove(sc);
if(acl != null) {
S3Grant[] grants = acl.getGrants();
if(grants != null && grants.length > 0) {
int grantOrder = 1;
for(S3Grant grant : grants) {
save(target, targetId, grant, grantOrder++);
}
}
}
txn.commit();
} finally {
txn.close();
}
}
@Override
public SAcl save(String target, long targetId, S3Grant grant, int grantOrder) {
SAclVO aclEntry = new SAclVO();
aclEntry.setTarget(target);
aclEntry.setTargetId(targetId);
aclEntry.setGrantOrder(grantOrder);
int grantee = grant.getGrantee();
aclEntry.setGranteeType(grantee);
aclEntry.setPermission(grant.getPermission());
aclEntry.setGranteeCanonicalId(grant.getCanonicalUserID());
Date ts = new Date();
aclEntry.setCreateTime(ts);
aclEntry.setLastModifiedTime(ts);
aclEntry = this.persist(aclEntry);
return aclEntry;
}
}
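
For context, a minimal usage sketch (not part of this commit) of the new SAclDao, assuming it is wired through ComponentLocator the same way the other DAOs are in this change; the wrapper class name and the "SBucket" target string are illustrative assumptions only.

// Hedged sketch: a service object redefining a bucket's ACL through SAclDao.
// The target string "SBucket" and the class name are assumptions, not taken from this commit.
import com.cloud.bridge.persist.dao.SAclDao;
import com.cloud.bridge.persist.dao.SAclDaoImpl;
import com.cloud.bridge.service.core.s3.S3AccessControlList;
import com.cloud.utils.component.ComponentLocator;

public class AclUsageSketch {
    // Injected once per class, mirroring the ComponentLocator pattern used elsewhere in this commit.
    protected final SAclDao aclDao = ComponentLocator.inject(SAclDaoImpl.class);

    public void replaceAcl(long bucketId, S3AccessControlList acl) {
        // save() opens its own AWSAPI_DB transaction, removes the existing
        // grants for the target, and re-inserts the new ones in grant order.
        aclDao.save("SBucket", bucketId, acl);
    }
}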

View File

@@ -1,37 +1,14 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.List;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.utils.db.GenericDao;
public class SBucketDao extends EntityDao<SBucket> {
public SBucketDao() {
super(SBucket.class);
}
public interface SBucketDao extends GenericDao<SBucketVO, Long> {
SBucketVO getByName(String bucketName);
List<SBucketVO> listBuckets(String canonicalId);
public SBucket getByName(String bucketName) {
return queryEntity("from SBucket where name=?", new Object[] {bucketName});
}
public List<SBucket> listBuckets(String canonicalId) {
return queryEntities("from SBucket where ownerCanonicalId=? order by createTime asc",
new Object[] {canonicalId});
}
}

View File

@@ -0,0 +1,72 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SBucketDao.class})
public class SBucketDaoImpl extends GenericDaoBase<SBucketVO, Long> implements SBucketDao {
public SBucketDaoImpl() {
}
@Override
public SBucketVO getByName(String bucketName) {
SearchBuilder<SBucketVO> SearchByName = createSearchBuilder();
SearchByName.and("Name", SearchByName.entity().getName(), SearchCriteria.Op.EQ);
//Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<SBucketVO> sc = SearchByName.create();
sc.setParameters("Name", bucketName);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public List<SBucketVO> listBuckets(String canonicalId) {
SearchBuilder<SBucketVO> ByCanonicalID = createSearchBuilder();
ByCanonicalID.and("OwnerCanonicalID", ByCanonicalID.entity().getOwnerCanonicalId(), SearchCriteria.Op.EQ);
Filter filter = new Filter(SBucketVO.class, "createTime", Boolean.TRUE, null, null);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<SBucketVO> sc = ByCanonicalID.create();
sc.setParameters("OwnerCanonicalID", canonicalId);
return listBy(sc, filter);
}finally {
txn.close();
}
}
}
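
A hedged sketch (not part of this commit) of calling the reworked SBucketDao from request-handling code. Because listBuckets() relies on Transaction.currentTxn(), the caller is assumed to have opened the AWSAPI_DB transaction first, as S3RestServlet.processRequest() now does; the class and method names here are illustrative.

import java.util.List;

import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.persist.dao.SBucketDao;
import com.cloud.bridge.persist.dao.SBucketDaoImpl;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;

public class BucketListingSketch {
    protected final SBucketDao bucketDao = ComponentLocator.inject(SBucketDaoImpl.class);

    public List<SBucketVO> listOwnedBuckets(String canonicalUserId) {
        // Open the awsapi database transaction on this thread so that the
        // DAO's Transaction.currentTxn() call has something to attach to.
        Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
        try {
            txn.start();
            return bucketDao.listBuckets(canonicalUserId);
        } finally {
            txn.close();
        }
    }
}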

View File

@@ -1,35 +1,12 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import com.cloud.bridge.model.SHost;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.model.SHostVO;
import com.cloud.utils.db.GenericDao;
public interface SHostDao extends GenericDao<SHostVO, Long> {
SHostVO getByHost(String host);
SHostVO getLocalStorageHost(long mhostId, String storageRoot);
public class SHostDao extends EntityDao<SHost> {
public SHostDao() {
super(SHost.class);
}
public SHost getByHost(String host) {
return queryEntity("from SHost where host=?", new Object[] { host });
}
public SHost getLocalStorageHost(long mhostId, String storageRoot) {
return queryEntity("from SHost where mhost=? and exportRoot=?",
new Object[] { new Long(mhostId), storageRoot});
}
}

View File

@@ -0,0 +1,67 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import javax.ejb.Local;
import com.cloud.bridge.model.SHostVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SHostDao.class})
public class SHostDaoImpl extends GenericDaoBase<SHostVO, Long> implements SHostDao {
public SHostDaoImpl() {}
@Override
public SHostVO getByHost(String host) {
SearchBuilder <SHostVO> HostSearch = createSearchBuilder();
HostSearch.and("Host", HostSearch.entity().getHost(), SearchCriteria.Op.EQ);
HostSearch.done();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SHostVO> sc = HostSearch.create();
sc.setParameters("Host", host);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public SHostVO getLocalStorageHost(long mhostId, String storageRoot) {
SearchBuilder <SHostVO> LocalStorageHostSearch = createSearchBuilder();
LocalStorageHostSearch.and("MHostID", LocalStorageHostSearch.entity().getMhostid(), SearchCriteria.Op.EQ);
LocalStorageHostSearch.and("ExportRoot", LocalStorageHostSearch.entity().getExportRoot(), SearchCriteria.Op.EQ);
LocalStorageHostSearch.done();
Transaction txn = Transaction.currentTxn();
try {
txn.start();
SearchCriteria<SHostVO> sc = LocalStorageHostSearch.create();
sc.setParameters("MHostID", mhostId);
sc.setParameters("ExportRoot", storageRoot);
return findOneBy(sc);
}finally {
txn.close();
}
}
}

View File

@@ -1,55 +1,17 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.List;
import com.cloud.bridge.model.SMeta;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.model.SMetaVO;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.utils.db.GenericDao;
public class SMetaDao extends EntityDao<SMeta> {
public SMetaDao() {
super(SMeta.class);
}
public List<SMeta> getByTarget(String target, long targetId) {
return queryEntities("from SMeta where target=? and targetId=?", new Object[] {target, targetId});
}
public interface SMetaDao extends GenericDao<SMetaVO, Long> {
public SMeta save(String target, long targetId, S3MetaDataEntry entry) {
SMeta meta = new SMeta();
meta.setTarget(target);
meta.setTargetId(targetId);
meta.setName(entry.getName());
meta.setValue(entry.getValue());
PersistContext.getSession().save(meta);
return meta;
}
public void save(String target, long targetId, S3MetaDataEntry[] entries) {
// To redefine the target's metadata
executeUpdate("delete from SMeta where target=? and targetId=?", new Object[] { target, new Long(targetId)});
List<SMetaVO> getByTarget(String target, long targetId);
SMetaVO save(String target, long targetId, S3MetaDataEntry entry);
void save(String target, long targetId, S3MetaDataEntry[] entries);
if(entries != null) {
for(S3MetaDataEntry entry : entries)
save(target, targetId, entry);
}
}
}

View File

@@ -0,0 +1,88 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.SMetaVO;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SMetaDao.class})
public class SMetaDaoImpl extends GenericDaoBase<SMetaVO, Long> implements SMetaDao {
public SMetaDaoImpl() {}
@Override
public List<SMetaVO> getByTarget(String target, long targetId) {
SearchBuilder <SMetaVO> SearchByTarget = createSearchBuilder();
SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ);
SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ);
SearchByTarget.done();
Transaction txn = Transaction.open( Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SMetaVO> sc = SearchByTarget.create();
sc.setParameters("Target", target);
sc.setParameters("TargetID", targetId);
return listBy(sc);
} finally {
txn.close();
}
}
@Override
public SMetaVO save(String target, long targetId, S3MetaDataEntry entry) {
SMetaVO meta = new SMetaVO();
meta.setTarget(target);
meta.setTargetId(targetId);
meta.setName(entry.getName());
meta.setValue(entry.getValue());
meta = this.persist(meta);
return meta;
}
@Override
public void save(String target, long targetId, S3MetaDataEntry[] entries) {
// To redefine the target's metadata
SearchBuilder <SMetaVO> SearchByTarget = createSearchBuilder();
SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ);
SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SMetaVO> sc = SearchByTarget.create();
sc.setParameters("Target", target);
sc.setParameters("TargetID", targetId);
this.remove(sc);
if(entries != null) {
for(S3MetaDataEntry entry : entries)
save(target, targetId, entry);
}
txn.commit();
}finally {
txn.close();
}
}
}
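
A short sketch (illustrative, not part of this commit) of replacing an object's metadata through the new SMetaDao; the surrounding class and the "SObject" target string are assumptions.

import com.cloud.bridge.persist.dao.SMetaDao;
import com.cloud.bridge.persist.dao.SMetaDaoImpl;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.utils.component.ComponentLocator;

public class MetadataSketch {
    protected final SMetaDao metaDao = ComponentLocator.inject(SMetaDaoImpl.class);

    public void replaceMetadata(long objectId, S3MetaDataEntry[] entries) {
        // save(target, id, entries) deletes the old rows and re-inserts the
        // new entries inside a single AWSAPI_DB transaction.
        metaDao.save("SObject", objectId, entries);
    }
}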

View File

@@ -1,76 +1,19 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.ArrayList;
import java.util.List;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.model.SObject;
import com.cloud.bridge.persist.EntityDao;
import com.cloud.bridge.util.EntityParam;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.model.SObjectVO;
import com.cloud.utils.db.GenericDao;
public class SObjectDao extends EntityDao<SObject> {
public SObjectDao() {
super(SObject.class);
}
public interface SObjectDao extends GenericDao<SObjectVO, Long> {
public SObject getByNameKey(SBucket bucket, String nameKey) {
return queryEntity("from SObject where bucket=? and nameKey=?",
new Object[] { new EntityParam(bucket), nameKey });
}
public List<SObject> listBucketObjects(SBucket bucket, String prefix, String marker, int maxKeys) {
StringBuffer sb = new StringBuffer();
List<Object> params = new ArrayList<Object>();
List<SObjectVO> listBucketObjects(SBucketVO bucket, String prefix,
String marker, int maxKeys);
sb.append("from SObject o left join fetch o.items where deletionMark is null and o.bucket=?");
params.add(new EntityParam(bucket));
if(prefix != null && !prefix.isEmpty()) {
sb.append(" and o.nameKey like ?");
params.add(new String(prefix + "%"));
}
if(marker != null && !marker.isEmpty()) {
sb.append(" and o.nameKey > ?");
params.add(marker);
}
return queryEntities(sb.toString(), 0, maxKeys, params.toArray());
}
public List<SObject> listAllBucketObjects(SBucket bucket, String prefix, String marker, int maxKeys) {
StringBuffer sb = new StringBuffer();
List<Object> params = new ArrayList<Object>();
List<SObjectVO> listAllBucketObjects(SBucketVO bucket, String prefix,
String marker, int maxKeys);
SObjectVO getByNameKey(SBucketVO bucket, String nameKey);
sb.append("from SObject o left join fetch o.items where o.bucket=?");
params.add(new EntityParam(bucket));
if(prefix != null && !prefix.isEmpty()) {
sb.append(" and o.nameKey like ?");
params.add(new String(prefix + "%"));
}
if(marker != null && !marker.isEmpty()) {
sb.append(" and o.nameKey > ?");
params.add(marker);
}
return queryEntities(sb.toString(), 0, maxKeys, params.toArray());
}
}

View File

@@ -0,0 +1,119 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.ejb.Local;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.model.SObjectItemVO;
import com.cloud.bridge.model.SObjectVO;
import com.cloud.bridge.util.EntityParam;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Filter;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SObjectDao.class})
public class SObjectDaoImpl extends GenericDaoBase<SObjectVO, Long> implements SObjectDao {
protected final SObjectItemDao itemDao = ComponentLocator.inject(SObjectItemDaoImpl.class);
public SObjectDaoImpl() {}
@Override
public SObjectVO getByNameKey(SBucketVO bucket, String nameKey) {
SObjectVO object = null;
SearchBuilder<SObjectVO> SearchByName = createSearchBuilder();
SearchByName.and("SBucketID", SearchByName.entity().getBucketID() , SearchCriteria.Op.EQ);
SearchByName.and("NameKey", SearchByName.entity().getNameKey() , SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<SObjectVO> sc = SearchByName.create();
sc.setParameters("SBucketID", bucket.getId());
sc.setParameters("NameKey", nameKey);
object = findOneBy(sc);
if (null != object) {
Set<SObjectItemVO> items = new HashSet<SObjectItemVO>(
itemDao.getItems(object.getId()));
object.setItems(items);
}
return object;
}finally {
txn.close();
}
}
@Override
public List<SObjectVO> listBucketObjects(SBucketVO bucket, String prefix, String marker, int maxKeys) {
StringBuffer sb = new StringBuffer();
List<Object> params = new ArrayList<Object>();
SearchBuilder<SObjectVO> SearchByBucket = createSearchBuilder();
List<SObjectVO> objects = new ArrayList<SObjectVO>();
SearchByBucket.and("SBucketID", SearchByBucket.entity().getBucketID(), SearchCriteria.Op.EQ);
SearchByBucket.and("DeletionMark", SearchByBucket.entity().getDeletionMark(), SearchCriteria.Op.NULL);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<SObjectVO> sc = SearchByBucket.create();
sc.setParameters("SBucketID", bucket.getId());
objects = listBy(sc);
for (SObjectVO sObjectVO : objects) {
Set<SObjectItemVO> items = new HashSet<SObjectItemVO>(itemDao.getItems(sObjectVO.getId()));
sObjectVO.setItems(items);
}
return objects;
}finally {
txn.close();
}
}
@Override
public List<SObjectVO> listAllBucketObjects(SBucketVO bucket, String prefix, String marker, int maxKeys) {
StringBuffer sb = new StringBuffer();
List<Object> params = new ArrayList<Object>();
SearchBuilder<SObjectVO> getAllBuckets = createSearchBuilder();
List<SObjectVO> objects = new ArrayList<SObjectVO>();
getAllBuckets.and("SBucketID", getAllBuckets.entity().getBucketID(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
txn.start();
SearchCriteria<SObjectVO> sc = getAllBuckets.create();
sc.setParameters("SBucketID", bucket.getId());
objects = listBy(sc);
for (SObjectVO sObjectVO : objects) {
Set<SObjectItemVO> items = new HashSet<SObjectItemVO>(itemDao.getItems(sObjectVO.getId()));
sObjectVO.setItems(items);
}
return objects;
}finally {
txn.close();
}
}
}
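
A hedged sketch (not part of this commit) of a single-object lookup through the new SObjectDao interface; SObjectDaoImpl eagerly attaches the object's item versions via the injected SObjectItemDao, so callers can inspect them directly. The wrapper class, method name, and the use of the deletion mark as an existence check are assumptions.

import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.model.SObjectVO;
import com.cloud.bridge.persist.dao.SObjectDao;
import com.cloud.bridge.persist.dao.SObjectDaoImpl;
import com.cloud.utils.component.ComponentLocator;

public class ObjectLookupSketch {
    protected final SObjectDao objectDao = ComponentLocator.inject(SObjectDaoImpl.class);

    public boolean objectExists(SBucketVO bucket, String nameKey) {
        // getByNameKey() runs in its own AWSAPI_DB transaction and populates
        // the item set before returning.
        SObjectVO object = objectDao.getByNameKey(bucket, nameKey);
        return object != null && object.getDeletionMark() == null;
    }
}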

View File

@@ -1,30 +1,14 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import com.cloud.bridge.model.SObjectItem;
import com.cloud.bridge.persist.EntityDao;
import java.util.List;
import com.cloud.bridge.model.SObjectItemVO;
import com.cloud.utils.db.GenericDao;
public interface SObjectItemDao extends GenericDao<SObjectItemVO, Long> {
SObjectItemVO getByObjectIdNullVersion(long id);
List<SObjectItemVO> getItems(long sobjectID);
public class SObjectItemDao extends EntityDao<SObjectItem> {
public SObjectItemDao() {
super(SObjectItem.class);
}
public SObjectItem getByObjectIdNullVersion(long id) {
return queryEntity("from SObjectItem where theObject=? and version is null", new Object[] { id });
}
}

View File

@@ -0,0 +1,71 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.util.List;
import javax.ejb.Local;
import com.cloud.bridge.model.SObjectItemVO;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={SObjectItemDao.class})
public class SObjectItemDaoImpl extends GenericDaoBase<SObjectItemVO, Long> implements SObjectItemDao {
public SObjectItemDaoImpl() {
}
@Override
public SObjectItemVO getByObjectIdNullVersion(long id) {
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
SearchBuilder <SObjectItemVO> SearchByID = createSearchBuilder();
SearchByID.and("ID", SearchByID.entity().getId(), SearchCriteria.Op.EQ);
try {
txn.start();
SearchCriteria<SObjectItemVO> sc = SearchByID.create();
sc.setParameters("ID", id);
return findOneBy(sc);
}finally {
txn.close();
}
}
@Override
public List<SObjectItemVO> getItems(long sobjectID) {
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
SearchBuilder<SObjectItemVO> SearchBySobjectID = createSearchBuilder();
SearchBySobjectID.and("SObjectID", SearchBySobjectID.entity().getId(), SearchCriteria.Op.EQ);
try {
txn.start();
SearchCriteria<SObjectItemVO> sc = SearchBySobjectID.create();
sc.setParameters("SObjectID", sobjectID);
return listBy(sc);
//findOneIncludingRemovedBy(sc);
} finally {
txn.close();
}
}
}

View File

@@ -1,169 +1,12 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.sql.*;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.utils.db.GenericDao;
import org.apache.log4j.Logger;
public interface UserCredentialsDao extends GenericDao<UserCredentialsVO, Long> {
import com.cloud.bridge.model.UserCredentials;
import com.cloud.bridge.service.exception.NoSuchObjectException;
UserCredentialsVO getByAccessKey(String cloudAccessKey);
UserCredentialsVO getByCertUniqueId(String certId);
public class UserCredentialsDao extends BaseDao{
public static final Logger logger = Logger.getLogger(UserCredentialsDao.class);
private Connection conn = null;
public UserCredentialsDao() {
}
public void setUserKeys( String cloudAccessKey, String cloudSecretKey )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
UserCredentials user = getByAccessKey( cloudAccessKey );
PreparedStatement statement = null;
openConnection();
try {
if ( null == user ) {
// -> do an insert since the user does not exist yet
statement = conn.prepareStatement ( "INSERT INTO usercredentials (AccessKey, SecretKey) VALUES(?,?)" );
statement.setString( 1, cloudAccessKey );
statement.setString( 2, cloudSecretKey );
}
else {
// -> do an update since the user exists
statement = conn.prepareStatement ( "UPDATE usercredentials SET SecretKey=? WHERE AccessKey=?" );
statement.setString( 1, cloudSecretKey );
statement.setString( 2, cloudAccessKey );
}
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
public void setCertificateId( String cloudAccessKey, String certId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
UserCredentials user = getByAccessKey( cloudAccessKey );
PreparedStatement statement = null;
if (null == user) throw new NoSuchObjectException( "Cloud API Access Key [" + cloudAccessKey + "] is unknown" );
openConnection();
try {
statement = conn.prepareStatement ( "UPDATE usercredentials SET CertUniqueId=? WHERE AccessKey=?" );
statement.setString( 1, certId );
statement.setString( 2, cloudAccessKey );
int count = statement.executeUpdate();
statement.close();
} finally {
closeConnection();
}
}
public UserCredentials getByAccessKey( String cloudAccessKey )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
openConnection();
UserCredentials user = null;
try {
PreparedStatement statement = conn.prepareStatement ( "SELECT SecretKey, CertUniqueId FROM usercredentials WHERE AccessKey=?" );
statement.setString( 1, cloudAccessKey );
statement.executeQuery();
ResultSet rs = statement.getResultSet ();
if (rs.next()) {
user = new UserCredentials();
user.setAccessKey( cloudAccessKey );
user.setSecretKey( rs.getString( "SecretKey" ));
user.setCertUniqueId( rs.getString( "CertUniqueId" ));
}
} finally {
closeConnection();
}
return user;
}
public UserCredentials getByCertUniqueId( String certId )
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
openConnection();
UserCredentials user = null;
try {
PreparedStatement statement = conn.prepareStatement ( "SELECT AccessKey, SecretKey FROM usercredentials WHERE CertUniqueId=?" );
statement.setString( 1, certId );
statement.executeQuery();
ResultSet rs = statement.getResultSet ();
if (rs.next()) {
user = new UserCredentials();
user.setAccessKey( rs.getString( "AccessKey" ));
user.setSecretKey( rs.getString( "SecretKey" ));
user.setCertUniqueId( certId );
}
} finally {
closeConnection();
}
return user;
}
private void openConnection()
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
if (null == conn) {
Class.forName( "com.mysql.jdbc.Driver" ).newInstance();
conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + "/" + awsapi_dbName, dbUser, dbPassword );
}
}
private void closeConnection() throws SQLException {
if (null != conn) conn.close();
conn = null;
}
public static void preCheckTableExistence() throws Exception{
UserCredentialsDao dao = new UserCredentialsDao();
dao.checkTableExistence();
}
private void checkTableExistence() throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
openConnection();
try {
PreparedStatement statement = conn.prepareStatement ( "SELECT * FROM usercredentials " );
statement.executeQuery();
ResultSet rs = statement.getResultSet ();
if (rs.next()) {
return;
}
return;
} catch(Exception e) {
Statement statement = conn.createStatement();
statement.execute( "create table usercredentials(id integer auto_increment primary key, AccessKey varchar(1000), SecretKey varchar(1000), CertUniqueId varchar(1000))" );
statement.close();
}
finally{
closeConnection();
}
}
}

View File

@@ -0,0 +1,73 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.persist.dao;
import java.sql.*;
import javax.ejb.Local;
import org.apache.log4j.Logger;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.Transaction;
@Local(value={UserCredentialsDao.class})
public class UserCredentialsDaoImpl extends GenericDaoBase<UserCredentialsVO, Long> implements UserCredentialsDao {
public static final Logger logger = Logger.getLogger(UserCredentialsDaoImpl.class);
public UserCredentialsDaoImpl() {}
@DB
@Override
public UserCredentialsVO getByAccessKey( String cloudAccessKey ) {
SearchBuilder<UserCredentialsVO> SearchByAccessKey = createSearchBuilder();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchByAccessKey.and("AccessKey", SearchByAccessKey.entity()
.getAccessKey(), SearchCriteria.Op.EQ);
SearchByAccessKey.done();
SearchCriteria<UserCredentialsVO> sc = SearchByAccessKey.create();
sc.setParameters("AccessKey", cloudAccessKey);
return findOneBy(sc);
}finally {
txn.commit();
txn.close();
}
}
@Override
public UserCredentialsVO getByCertUniqueId( String certId ) {
SearchBuilder<UserCredentialsVO> SearchByCertID = createSearchBuilder();
SearchByCertID.and("CertUniqueId", SearchByCertID.entity().getCertUniqueId(), SearchCriteria.Op.EQ);
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
SearchCriteria<UserCredentialsVO> sc = SearchByCertID.create();
sc.setParameters("CertUniqueId", certId);
return findOneBy(sc);
}finally {
txn.close();
}
}
}
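
A minimal sketch (not part of this commit) of how servlet code later in this change registers user keys with the new UserCredentialsDao instead of the old raw-JDBC class; the wrapper class and method are illustrative.

import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;

public class CredentialsSketch {
    protected final UserCredentialsDao ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);

    public void setUserKeys(String accessKey, String secretKey) {
        // Persist the key pair inside an explicit awsapi transaction,
        // mirroring the pattern used in EC2RestServlet.setUserKeys() below.
        Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
        try {
            txn.start();
            ucDao.persist(new UserCredentialsVO(accessKey, secretKey));
            txn.commit();
        } finally {
            txn.close();
        }
    }
}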

View File

@@ -27,12 +27,18 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.log4j.Logger;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.persist.dao.CloudStackConfigurationDao;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.persist.dao.CloudStackConfigurationDaoImpl;
import com.cloud.bridge.util.ConfigurationHelper;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.component.Inject;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Transaction;
import net.sf.ehcache.Cache;
@DB
public class EC2MainServlet extends HttpServlet{
private static final long serialVersionUID = 2201599478145974479L;
@@ -41,23 +47,23 @@ public class EC2MainServlet extends HttpServlet{
public static final String EC2_SOAP_SERVLET_PATH="/services/AmazonEC2/";
public static final String ENABLE_EC2_API="enable.ec2.api";
private static boolean isEC2APIEnabled = false;
public static final Logger logger = Logger.getLogger(EC2MainServlet.class);
CloudStackConfigurationDao csDao = ComponentLocator.inject(CloudStackConfigurationDaoImpl.class);
/**
* We build the path to where the keystore holding the WS-Security X509 certificates
* are stored.
*/
@DB
public void init( ServletConfig config ) throws ServletException {
try{
ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext());
UserCredentialsDao.preCheckTableExistence();
ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext());
// check if API is enabled
CloudStackConfigurationDao csDao = new CloudStackConfigurationDao();
String value = csDao.getConfigValue(ENABLE_EC2_API);
if(value != null){
isEC2APIEnabled = Boolean.valueOf(value);
}
PersistContext.commitTransaction(true);
PersistContext.closeSession(true);
logger.info("Value of EC2 API Flag ::" + value);
}catch(Exception e){
throw new ServletException("Error initializing awsapi: " + e.getMessage());
}
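
The EC2MainServlet change above illustrates the injection pattern this commit applies across the servlets: DAOs become class-level fields obtained from ComponentLocator instead of being constructed per request, and the Hibernate PersistContext bookkeeping goes away. A hedged sketch of that pattern in isolation (class and method names are assumptions):

import javax.servlet.http.HttpServlet;

import com.cloud.bridge.persist.dao.CloudStackConfigurationDao;
import com.cloud.bridge.persist.dao.CloudStackConfigurationDaoImpl;
import com.cloud.utils.component.ComponentLocator;

public class ConfigFlagSketch extends HttpServlet {
    private static final long serialVersionUID = 1L;

    // One injected DAO per servlet class; no PersistContext session management needed.
    private final CloudStackConfigurationDao csDao =
            ComponentLocator.inject(CloudStackConfigurationDaoImpl.class);

    protected boolean isApiEnabled(String flagKey) {
        String value = csDao.getConfigValue(flagKey);
        return value != null && Boolean.valueOf(value);
    }
}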

View File

@@ -94,10 +94,9 @@ import com.amazon.ec2.RunInstancesResponse;
import com.amazon.ec2.StartInstancesResponse;
import com.amazon.ec2.StopInstancesResponse;
import com.amazon.ec2.TerminateInstancesResponse;
import com.cloud.bridge.model.UserCredentials;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.persist.dao.OfferingDao;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.OfferingDaoImpl;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.bridge.service.controller.s3.ServiceProvider;
import com.cloud.bridge.service.core.ec2.EC2AssociateAddress;
import com.cloud.bridge.service.core.ec2.EC2AuthorizeRevokeSecurityGroup;
@@ -140,11 +139,15 @@ import com.cloud.bridge.util.AuthenticationUtils;
import com.cloud.bridge.util.ConfigurationHelper;
import com.cloud.bridge.util.EC2RestAuth;
import com.cloud.stack.models.CloudStackAccount;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;
public class EC2RestServlet extends HttpServlet {
private static final long serialVersionUID = -6168996266762804888L;
protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);
protected final OfferingDaoImpl ofDao = ComponentLocator.inject(OfferingDaoImpl.class);
public static final Logger logger = Logger.getLogger(EC2RestServlet.class);
@@ -278,8 +281,6 @@ public class EC2RestServlet extends HttpServlet {
logger.error("Unsupported action " + action);
throw new EC2ServiceException(ClientError.Unsupported, "This operation is not available");
}
PersistContext.commitTransaction();
PersistContext.commitTransaction(true);
} catch( EC2ServiceException e ) {
response.setStatus(e.getErrorCode());
@@ -306,8 +307,6 @@ public class EC2RestServlet extends HttpServlet {
} catch (IOException e) {
logger.error("Unexpected exception " + e.getMessage(), e);
}
PersistContext.closeSession();
PersistContext.closeSession(true);
}
}
@@ -343,7 +342,7 @@ public class EC2RestServlet extends HttpServlet {
private void setUserKeys( HttpServletRequest request, HttpServletResponse response ) {
String[] accessKey = null;
String[] secretKey = null;
Transaction txn = null;
try {
// -> all these parameters are required
accessKey = request.getParameterValues( "accesskey" );
@@ -369,15 +368,20 @@ public class EC2RestServlet extends HttpServlet {
UserContext context = UserContext.current();
try {
txn = Transaction.open(Transaction.AWSAPI_DB);
// -> use the keys to see if the account actually exists
ServiceProvider.getInstance().getEC2Engine().validateAccount( accessKey[0], secretKey[0] );
UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setUserKeys( accessKey[0], secretKey[0] );
/* UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setUserKeys( );
*/ UserCredentialsVO user = new UserCredentialsVO(accessKey[0], secretKey[0]);
ucDao.persist(user);
txn.commit();
} catch( Exception e ) {
logger.error("SetUserKeys " + e.getMessage(), e);
response.setStatus(401);
endResponse(response, e.toString());
txn.close();
return;
}
response.setStatus(200);
@@ -402,6 +406,7 @@ public class EC2RestServlet extends HttpServlet {
*/
private void setCertificate( HttpServletRequest request, HttpServletResponse response )
throws Exception {
Transaction txn = null;
try {
// [A] Pull the cert and cloud AccessKey from the request
String[] certificate = request.getParameterValues( "cert" );
@@ -437,10 +442,16 @@ public class EC2RestServlet extends HttpServlet {
// [C] Associate the cert's uniqueId with the Cloud API keys
String uniqueId = AuthenticationUtils.X509CertUniqueId( userCert );
logger.debug( "SetCertificate, uniqueId: " + uniqueId );
UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setCertificateId( accessKey[0], uniqueId );
response.setStatus(200);
/* UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setCertificateId( accessKey[0], uniqueId );
*/
txn = Transaction.open(Transaction.AWSAPI_DB);
UserCredentialsVO user = ucDao.getByAccessKey(accessKey[0]);
user.setCertUniqueId(uniqueId);
ucDao.update(user.getId(), user);
response.setStatus(200);
endResponse(response, "User certificate set successfully");
txn.commit();
} catch( NoSuchObjectException e ) {
logger.error("SetCertificate exception " + e.getMessage(), e);
@@ -449,7 +460,10 @@ public class EC2RestServlet extends HttpServlet {
} catch( Exception e ) {
logger.error("SetCertificate exception " + e.getMessage(), e);
response.sendError(500, "SetCertificate exception " + e.getMessage());
} finally {
txn.close();
}
}
/**
@@ -464,7 +478,8 @@ public class EC2RestServlet extends HttpServlet {
* algorithm.
*/
private void deleteCertificate( HttpServletRequest request, HttpServletResponse response )
throws Exception {
throws Exception {
Transaction txn = null;
try {
String [] accessKey = request.getParameterValues( "AWSAccessKeyId" );
if ( null == accessKey || 0 == accessKey.length ) {
@@ -483,10 +498,16 @@ public class EC2RestServlet extends HttpServlet {
certStore.store( fsOut, keystorePassword.toCharArray());
// -> dis-associate the cert's uniqueId with the Cloud API keys
UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setCertificateId( accessKey[0], null );
/* UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setCertificateId( accessKey[0], null );
*/ txn = Transaction.open(Transaction.AWSAPI_DB);
UserCredentialsVO user = ucDao.getByAccessKey(accessKey[0]);
user.setCertUniqueId(null);
ucDao.update(user.getId(), user);
response.setStatus(200);
endResponse(response, "User certificate deleted successfully");
endResponse(response, "User certificate deleted successfully");
txn.commit();
}
else response.setStatus(404);
@@ -497,6 +518,8 @@ public class EC2RestServlet extends HttpServlet {
} catch( Exception e ) {
logger.error("DeleteCertificate exception " + e.getMessage(), e);
response.sendError(500, "DeleteCertificate exception " + e.getMessage());
} finally {
txn.close();
}
}
@@ -547,7 +570,7 @@ public class EC2RestServlet extends HttpServlet {
}
try {
OfferingDao ofDao = new OfferingDao();
ofDao.setOfferMapping( amazonOffer, cloudOffer );
} catch( Exception e ) {
@@ -596,9 +619,7 @@ public class EC2RestServlet extends HttpServlet {
}
try {
OfferingDao ofDao = new OfferingDao();
ofDao.deleteOfferMapping( amazonOffer );
} catch( Exception e ) {
logger.error("DeleteOfferMapping " + e.getMessage(), e);
response.setStatus(401);
@@ -1695,8 +1716,8 @@ public class EC2RestServlet extends HttpServlet {
}
// [B] Use the cloudAccessKey to get the users secret key in the db
UserCredentialsDao credentialDao = new UserCredentialsDao();
UserCredentials cloudKeys = credentialDao.getByAccessKey( cloudAccessKey );
UserCredentialsVO cloudKeys = ucDao.getByAccessKey( cloudAccessKey );
if ( null == cloudKeys )
{
logger.debug( cloudAccessKey + " is not defined in the EC2 service - call SetUserKeys" );

View File

@@ -259,6 +259,10 @@ public class EC2SoapServiceImpl implements AmazonEC2SkeletonInterface {
List<String> resourceTypeList = new ArrayList<String>();
if (items != null) {
for( int i=0; i < items.length; i++ ) {
if (!items[i].getResourceId().contains(":") || items[i].getResourceId().split(":").length != 2) {
throw new EC2ServiceException( ClientError.InvalidResourceId_Format,
"Invalid Format. ResourceId format is resource-type:resource-uuid");
}
String resourceType = items[i].getResourceId().split(":")[0];
if (resourceTypeList.isEmpty())
resourceTypeList.add(resourceType);
@@ -1374,7 +1378,7 @@ public class EC2SoapServiceImpl implements AmazonEC2SkeletonInterface {
param7.setPrivateDnsName( "" );
param7.setDnsName( "" );
param7.setReason( "" );
param7.setKeyName( "" );
param7.setKeyName( inst.getKeyPairName());
param7.setAmiLaunchIndex( "" );
param7.setInstanceType( inst.getServiceOffering());
@@ -1696,7 +1700,7 @@ public class EC2SoapServiceImpl implements AmazonEC2SkeletonInterface {
param7.setPrivateDnsName( "" );
param7.setDnsName( "" );
param7.setReason( "" );
param7.setKeyName( "" );
param7.setKeyName( inst.getKeyPairName());
param7.setAmiLaunchIndex( "" );
ProductCodesSetType param9 = new ProductCodesSetType();

View File

@@ -43,9 +43,12 @@ import org.w3c.dom.NodeList;
import com.cloud.bridge.io.MultiPartDimeInputStream;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.CloudStackConfigurationDao;
import com.cloud.bridge.persist.dao.CloudStackConfigurationDaoImpl;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.bridge.service.controller.s3.S3BucketAction;
import com.cloud.bridge.service.controller.s3.S3ObjectAction;
import com.cloud.bridge.service.controller.s3.ServiceProvider;
@@ -57,26 +60,29 @@ import com.cloud.bridge.service.core.s3.S3Grant;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.bridge.service.core.s3.S3PutObjectRequest;
import com.cloud.bridge.service.core.s3.S3PutObjectResponse;
import com.cloud.bridge.service.exception.InternalErrorException;
import com.cloud.bridge.service.exception.InvalidBucketName;
import com.cloud.bridge.service.exception.NoSuchObjectException;
import com.cloud.bridge.service.exception.PermissionDeniedException;
import com.cloud.bridge.util.AuthenticationUtils;
import com.cloud.bridge.util.ConfigurationHelper;
import com.cloud.bridge.util.HeaderParam;
import com.cloud.bridge.util.RestAuth;
import com.cloud.bridge.util.S3SoapAuth;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Transaction;
import net.sf.ehcache.Cache;
public class S3RestServlet extends HttpServlet {
private static final long serialVersionUID = -6168996266762804877L;
public static final String ENABLE_S3_API="enable.s3.api";
private static boolean isS3APIEnabled = false;
public static final Logger logger = Logger.getLogger(S3RestServlet.class);
protected final CloudStackConfigurationDao csDao = ComponentLocator.inject(CloudStackConfigurationDaoImpl.class);
protected final UserCredentialsDao ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);
protected void doGet(HttpServletRequest req, HttpServletResponse resp) {
processRequest( req, resp, "GET" );
}
}
protected void doPost(HttpServletRequest req, HttpServletResponse resp)
{
@@ -106,15 +112,13 @@ public class S3RestServlet extends HttpServlet {
public void init( ServletConfig config ) throws ServletException {
try{
ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext());
UserCredentialsDao.preCheckTableExistence();
// check if API is enabled
CloudStackConfigurationDao csDao = new CloudStackConfigurationDao();
String value = csDao.getConfigValue(ENABLE_S3_API);
if(value != null) {
isS3APIEnabled = Boolean.valueOf(value);
}
PersistContext.commitTransaction(true);
PersistContext.closeSession(true);
logger.info("S3Engine :: Configuration value is : " + value);
}catch(Exception e){
throw new ServletException("Error initializing awsapi: " + e.getMessage());
}
@@ -130,6 +134,7 @@ public class S3RestServlet extends HttpServlet {
*/
private void processRequest( HttpServletRequest request, HttpServletResponse response, String method )
{
Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
try {
logRequest(request);
@@ -164,12 +169,13 @@ public class S3RestServlet extends HttpServlet {
}
txn.start();
// -> authenticated calls
if ( !((method.equalsIgnoreCase( "POST" ) && !(request.getQueryString().equalsIgnoreCase("delete"))) ) ){
S3AuthParams params = extractRequestHeaders( request );
authenticateRequest( request, params );
}
ServletAction action = routeRequest(request);
if ( action != null ) {
action.execute(request, response);
@@ -178,35 +184,30 @@ public class S3RestServlet extends HttpServlet {
response.setStatus(404);
endResponse(response, "File not found");
}
PersistContext.commitTransaction();
txn.close();
}
catch( InvalidBucketName e) {
PersistContext.rollbackTransaction();
logger.error("Unexpected exception " + e.getMessage(), e);
response.setStatus(400);
endResponse(response, "Invalid Bucket Name - " + e.toString());
}
catch(PermissionDeniedException e) {
PersistContext.rollbackTransaction();
logger.error("Unexpected exception " + e.getMessage(), e);
response.setStatus(403);
endResponse(response, "Access denied - " + e.toString());
}
catch(Throwable e) {
PersistContext.rollbackTransaction();
logger.error("Unexpected exception " + e.getMessage(), e);
response.setStatus(404);
endResponse(response, "Bad request");
} finally {
try {
response.flushBuffer();
} catch (IOException e) {
logger.error("Unexpected exception " + e.getMessage(), e);
}
PersistContext.closeSession();
}
}
@@ -239,6 +240,7 @@ public class S3RestServlet extends HttpServlet {
*
* As with all REST calls HTTPS should be used to ensure their security.
*/
@DB
private void setUserKeys( HttpServletRequest request, HttpServletResponse response ) {
String[] accessKey = null;
String[] secretKey = null;
@@ -266,8 +268,14 @@ public class S3RestServlet extends HttpServlet {
try {
// -> use the keys to see if the account actually exists
//ServiceProvider.getInstance().getEC2Engine().validateAccount( accessKey[0], secretKey[0] );
UserCredentialsDao credentialDao = new UserCredentialsDao();
credentialDao.setUserKeys( accessKey[0], secretKey[0] );
//UserCredentialsDaoImpl credentialDao = new UserCredentialsDao();
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
txn.start();
UserCredentialsVO user = new UserCredentialsVO(accessKey[0], secretKey[0]);
user = ucDao.persist(user);
txn.commit();
txn.close();
//credentialDao.setUserKeys( accessKey[0], secretKey[0] );
} catch( Exception e ) {
logger.error("SetUserKeys " + e.getMessage(), e);
@@ -586,7 +594,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request)
xml.append( "</soap:Body></soap:Envelope>" );
endResponse(response, xml.toString());
PersistContext.commitTransaction();
return;
}
@@ -605,7 +612,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request)
xml.append( "</soap:Body></soap:Envelope>" );
endResponse(response, xml.toString());
PersistContext.commitTransaction();
}
catch(PermissionDeniedException e) {
logger.error("Unexpected exception " + e.getMessage(), e);
@@ -618,7 +624,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request)
}
finally
{
PersistContext.closeSession();
}
}

View File

@@ -49,14 +49,18 @@ import com.amazon.s3.GetBucketAccessControlPolicyResponse;
import com.amazon.s3.ListAllMyBucketsResponse;
import com.amazon.s3.ListBucketResponse;
import com.cloud.bridge.io.MTOMAwareResultStreamWriter;
import com.cloud.bridge.model.BucketPolicyVO;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.model.SAclVO;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.model.SHost;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.persist.dao.BucketPolicyDao;
import com.cloud.bridge.persist.dao.BucketPolicyDaoImpl;
import com.cloud.bridge.persist.dao.MultipartLoadDao;
import com.cloud.bridge.persist.dao.SAclDao;
import com.cloud.bridge.persist.dao.SAclDaoImpl;
import com.cloud.bridge.persist.dao.SBucketDao;
import com.cloud.bridge.persist.dao.SBucketDaoImpl;
import com.cloud.bridge.service.S3Constants;
import com.cloud.bridge.service.S3RestServlet;
import com.cloud.bridge.service.controller.s3.ServiceProvider;
@@ -81,16 +85,13 @@ import com.cloud.bridge.service.core.s3.S3ListAllMyBucketsResponse;
import com.cloud.bridge.service.core.s3.S3ListBucketObjectEntry;
import com.cloud.bridge.service.core.s3.S3ListBucketRequest;
import com.cloud.bridge.service.core.s3.S3ListBucketResponse;
import com.cloud.bridge.service.core.s3.S3MetaDataEntry;
import com.cloud.bridge.service.core.s3.S3MultipartUpload;
import com.cloud.bridge.service.core.s3.S3PolicyContext;
import com.cloud.bridge.service.core.s3.S3PutObjectRequest;
import com.cloud.bridge.service.core.s3.S3Response;
import com.cloud.bridge.service.core.s3.S3SetBucketAccessControlPolicyRequest;
import com.cloud.bridge.service.core.s3.S3BucketPolicy.PolicyAccess;
import com.cloud.bridge.service.core.s3.S3PolicyAction.PolicyActions;
import com.cloud.bridge.service.core.s3.S3PolicyCondition.ConditionKeys;
import com.cloud.bridge.service.exception.InternalErrorException;
import com.cloud.bridge.service.exception.InvalidBucketName;
import com.cloud.bridge.service.exception.InvalidRequestContentException;
import com.cloud.bridge.service.exception.NetworkIOException;
@@ -108,10 +109,14 @@ import com.cloud.bridge.util.Triple;
import com.cloud.bridge.util.XSerializer;
import com.cloud.bridge.util.XSerializerXmlAdapter;
import com.cloud.bridge.util.XmlHelper;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;
public class S3BucketAction implements ServletAction {
protected final static Logger logger = Logger.getLogger(S3BucketAction.class);
protected final BucketPolicyDao bPolicyDao = ComponentLocator.inject(BucketPolicyDaoImpl.class);
protected final SBucketDao bucketDao = ComponentLocator.inject(SBucketDaoImpl.class);
private DocumentBuilderFactory dbf = null;
public S3BucketAction() {
@@ -347,18 +352,16 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
String policy = streamToString( request.getInputStream());
// [A] Is there an owner of an existing policy or bucket?
BucketPolicyDao policyDao = new BucketPolicyDao();
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName( bucketName );
SBucketVO bucket = bucketDao.getByName( bucketName );
String owner = null;
if ( null != bucket )
{
owner = bucket.getOwnerCanonicalId();
owner = bucket.getOwnerCanonicalId();
}
else
{ try {
owner = policyDao.getPolicyOwner( bucketName );
owner = bPolicyDao.getByName(bucketName).getOwnerCanonicalID();
}
catch( Exception e ) {}
}
@@ -366,36 +369,42 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
// [B] "The bucket owner by default has permissions to attach bucket policies to their buckets using PUT Bucket policy."
// -> the bucket owner may want to restrict the IP address from where this can be executed
String client = UserContext.current().getCanonicalUserId();
S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketPolicy, bucketName );
switch( S3Engine.verifyPolicy( context )) {
case ALLOW:
break;
case DEFAULT_DENY:
if (null != owner && !client.equals( owner )) {
response.setStatus(405);
return;
}
break;
case DENY:
response.setStatus(403);
return;
}
String client = UserContext.current().getCanonicalUserId();
S3PolicyContext context = new S3PolicyContext(
PolicyActions.PutBucketPolicy, bucketName);
switch (S3Engine.verifyPolicy(context)) {
case ALLOW:
break;
case DEFAULT_DENY:
if (null != owner && !client.equals(owner)) {
response.setStatus(405);
return;
}
break;
case DENY:
response.setStatus(403);
return;
}
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
// [B] Place the policy into the database, overwriting any existing policy
try {
// -> first make sure that the policy is valid by parsing it
PolicyParser parser = new PolicyParser();
S3BucketPolicy sbp = parser.parse( policy, bucketName );
policyDao.deletePolicy( bucketName );
if (null != policy && !policy.isEmpty()) policyDao.addPolicy( bucketName, client, policy );
bPolicyDao.deletePolicy(bucketName);
if (null != policy && !policy.isEmpty()) {
BucketPolicyVO bpolicy = new BucketPolicyVO(bucketName, client, policy);
bpolicy = bPolicyDao.persist(bpolicy);
//policyDao.addPolicy( bucketName, client, policy );
}
if (null != sbp) ServiceProvider.getInstance().setBucketPolicy( bucketName, sbp );
response.setStatus(200);
response.setStatus(200);
txn.commit();
txn.close();
}
catch( PermissionDeniedException e ) {
logger.error("Put Bucket Policy failed due to " + e.getMessage(), e);
@@ -416,185 +425,193 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
// [A] Is there an owner of an existing policy or bucket?
BucketPolicyDao policyDao = new BucketPolicyDao();
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName( bucketName );
String owner = null;
if ( null != bucket )
{
owner = bucket.getOwnerCanonicalId();
}
else
{ try {
owner = policyDao.getPolicyOwner( bucketName );
}
catch( Exception e ) {}
}
SBucketVO bucket = bucketDao.getByName(bucketName);
String owner = null;
// [B] "The bucket owner by default has permissions to retrieve bucket policies using GET Bucket policy."
// -> the bucket owner may want to restrict the IP address from where this can be executed
String client = UserContext.current().getCanonicalUserId();
S3PolicyContext context = new S3PolicyContext( PolicyActions.GetBucketPolicy, bucketName );
switch( S3Engine.verifyPolicy( context )) {
case ALLOW:
break;
case DEFAULT_DENY:
if (null != owner && !client.equals( owner )) {
response.setStatus(405);
return;
}
break;
case DENY:
response.setStatus(403);
return;
}
// [B] Pull the policy from the database if one exists
try {
String policy = policyDao.getPolicy( bucketName );
if ( null == policy ) {
response.setStatus(404);
}
else {
response.setStatus(200);
response.setContentType("application/json");
S3RestServlet.endResponse(response, policy);
}
}
catch( Exception e ) {
logger.error("Get Bucket Policy failed due to " + e.getMessage(), e);
response.setStatus(500);
}
if (null != bucket) {
owner = bucket.getOwnerCanonicalId();
} else {
try {
owner = bPolicyDao.getByName(bucketName).getOwnerCanonicalID();
} catch (Exception e) {
}
}
private void executeDeleteBucketPolicy(HttpServletRequest request, HttpServletResponse response)
{
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName( bucketName );
if (bucket != null)
{
String client = UserContext.current().getCanonicalUserId();
if (!client.equals( bucket.getOwnerCanonicalId())) {
response.setStatus(405);
return;
}
}
// [B]
// "The bucket owner by default has permissions to retrieve bucket policies using GET Bucket policy."
// -> the bucket owner may want to restrict the IP address from where
// this can be executed
String client = UserContext.current().getCanonicalUserId();
S3PolicyContext context = new S3PolicyContext(
PolicyActions.GetBucketPolicy, bucketName);
switch (S3Engine.verifyPolicy(context)) {
case ALLOW:
break;
try {
BucketPolicyDao policyDao = new BucketPolicyDao();
String policy = policyDao.getPolicy( bucketName );
if ( null == policy ) {
response.setStatus(204);
}
else {
ServiceProvider.getInstance().deleteBucketPolicy( bucketName );
policyDao.deletePolicy( bucketName );
response.setStatus(200);
}
}
catch( Exception e ) {
logger.error("Delete Bucket Policy failed due to " + e.getMessage(), e);
response.setStatus(500);
}
case DEFAULT_DENY:
if (null != owner && !client.equals(owner)) {
response.setStatus(405);
return;
}
break;
case DENY:
response.setStatus(403);
return;
}
public void executeGetAllBuckets(HttpServletRequest request, HttpServletResponse response)
throws IOException, XMLStreamException
{
Calendar cal = Calendar.getInstance();
cal.set( 1970, 1, 1 );
S3ListAllMyBucketsRequest engineRequest = new S3ListAllMyBucketsRequest();
engineRequest.setAccessKey(UserContext.current().getAccessKey());
engineRequest.setRequestTimestamp( cal );
engineRequest.setSignature( "" );
// [B] Pull the policy from the database if one exists
try {
String policy = bPolicyDao.getByName(bucketName).getPolicy();
if (null == policy) {
response.setStatus(404);
} else {
response.setStatus(200);
response.setContentType("application/json");
S3RestServlet.endResponse(response, policy);
}
} catch (Exception e) {
logger.error("Get Bucket Policy failed due to " + e.getMessage(), e);
response.setStatus(500);
}
}
private void executeDeleteBucketPolicy(HttpServletRequest request,
HttpServletResponse response) {
String bucketName = (String) request
.getAttribute(S3Constants.BUCKET_ATTR_KEY);
S3ListAllMyBucketsResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest);
// To allow the all buckets list to be serialized via Axiom classes
ListAllMyBucketsResponse allBuckets = S3SerializableServiceImplementation.toListAllMyBucketsResponse( engineResponse );
OutputStream outputStream = response.getOutputStream();
response.setStatus(200);
response.setContentType("application/xml");
// The content-type literally should be "application/xml; charset=UTF-8"
// but any compliant JVM supplies utf-8 by default
// MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListAllMyBucketsResult", outputStream );
// resultWriter.startWrite();
// resultWriter.writeout(allBuckets);
// resultWriter.stopWrite();
StringBuffer xml = new StringBuffer();
xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" );
xml.append("<ListAllMyBucketsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">");
xml.append("<Owner><ID>");
xml.append(engineResponse.getOwner().getID()).append("</ID>");
xml.append("<DisplayName>").append(engineResponse.getOwner().getDisplayName()).append("</DisplayName>");
xml.append("</Owner>").append("<Buckets>");
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
for (S3ListAllMyBucketsEntry entry :engineResponse.getBuckets()) {
xml.append("<Bucket>").append("<Name>").append(entry.getName()).append("</Name>");
xml.append("<CreationDate>").append(sdf.format(entry.getCreationDate().getTime())).append("</CreationDate>");
xml.append("</Bucket>");
}
xml.append("</Buckets>").append("</ListAllMyBucketsResult>");
response.setStatus(200);
response.setContentType("text/xml; charset=UTF-8");
S3RestServlet.endResponse(response, xml.toString());
SBucketVO bucket = bucketDao.getByName(bucketName);
if (bucket != null) {
String client = UserContext.current().getCanonicalUserId();
if (!client.equals(bucket.getOwnerCanonicalId())) {
response.setStatus(405);
return;
}
}
try {
String policy = bPolicyDao.getByName(bucketName).getPolicy();
if (null == policy) {
response.setStatus(204);
} else {
ServiceProvider.getInstance().deleteBucketPolicy(bucketName);
bPolicyDao.deletePolicy(bucketName);
response.setStatus(200);
}
} catch (Exception e) {
logger.error(
"Delete Bucket Policy failed due to " + e.getMessage(), e);
response.setStatus(500);
}
}
public void executeGetAllBuckets(HttpServletRequest request,
HttpServletResponse response) throws IOException,
XMLStreamException {
Calendar cal = Calendar.getInstance();
cal.set(1970, 1, 1);
S3ListAllMyBucketsRequest engineRequest = new S3ListAllMyBucketsRequest();
engineRequest.setAccessKey(UserContext.current().getAccessKey());
engineRequest.setRequestTimestamp(cal);
engineRequest.setSignature("");
S3ListAllMyBucketsResponse engineResponse = ServiceProvider
.getInstance().getS3Engine().handleRequest(engineRequest);
// To allow the all buckets list to be serialized via Axiom classes
ListAllMyBucketsResponse allBuckets = S3SerializableServiceImplementation
.toListAllMyBucketsResponse(engineResponse);
OutputStream outputStream = response.getOutputStream();
response.setStatus(200);
response.setContentType("application/xml");
// The content-type literally should be "application/xml; charset=UTF-8"
// but any compliant JVM supplies utf-8 by default
// MTOMAwareResultStreamWriter resultWriter = new
// MTOMAwareResultStreamWriter ("ListAllMyBucketsResult", outputStream
// );
// resultWriter.startWrite();
// resultWriter.writeout(allBuckets);
// resultWriter.stopWrite();
StringBuffer xml = new StringBuffer();
xml.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
xml.append("<ListAllMyBucketsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">");
xml.append("<Owner><ID>");
xml.append(engineResponse.getOwner().getID()).append("</ID>");
xml.append("<DisplayName>")
.append(engineResponse.getOwner().getDisplayName())
.append("</DisplayName>");
xml.append("</Owner>").append("<Buckets>");
SimpleDateFormat sdf = new SimpleDateFormat(
"yyyy-MM-dd'T'HH:mm:ss.SSSZ");
for (S3ListAllMyBucketsEntry entry : engineResponse.getBuckets()) {
xml.append("<Bucket>").append("<Name>").append(entry.getName())
.append("</Name>");
xml.append("<CreationDate>")
.append(sdf.format(entry.getCreationDate().getTime()))
.append("</CreationDate>");
xml.append("</Bucket>");
}
xml.append("</Buckets>").append("</ListAllMyBucketsResult>");
response.setStatus(200);
response.setContentType("text/xml; charset=UTF-8");
S3RestServlet.endResponse(response, xml.toString());
}
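executeGetAllBuckets hand-builds the ListAllMyBucketsResult document and stamps each <CreationDate> with the SimpleDateFormat pattern shown above. A small runnable illustration of the timestamp shape that pattern produces (the example date is made up):

    import java.text.SimpleDateFormat;
    import java.util.Calendar;
    import java.util.TimeZone;

    public class CreationDateFormatSketch {
        public static void main(String[] args) {
            SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ");
            sdf.setTimeZone(TimeZone.getTimeZone("GMT"));
            Calendar created = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            created.clear();
            created.set(2012, Calendar.SEPTEMBER, 5, 14, 31, 24);
            // Prints 2012-09-05T14:31:24.000+0000, the value written into <CreationDate>.
            System.out.println(sdf.format(created.getTime()));
        }
    }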
public void executeGetBucket(HttpServletRequest request, HttpServletResponse response)
throws IOException, XMLStreamException
{
S3ListBucketRequest engineRequest = new S3ListBucketRequest();
engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY));
engineRequest.setDelimiter(request.getParameter("delimiter"));
engineRequest.setMarker(request.getParameter("marker"));
engineRequest.setPrefix(request.getParameter("prefix"));
int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000);
engineRequest.setMaxKeys(maxKeys);
try {
S3ListBucketResponse engineResponse = ServiceProvider.getInstance().getS3Engine().listBucketContents( engineRequest, false );
// To allow the all list buckets result to be serialized via Axiom classes
ListBucketResponse oneBucket = S3SerializableServiceImplementation.toListBucketResponse( engineResponse );
OutputStream outputStream = response.getOutputStream();
response.setStatus(200);
response.setContentType("application/xml");
// The content-type literally should be "application/xml; charset=UTF-8"
// but any compliant JVM supplies utf-8 by default;
MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListBucketResult", outputStream );
resultWriter.startWrite();
resultWriter.writeout(oneBucket);
resultWriter.stopWrite();
} catch (NoSuchObjectException nsoe) {
response.setStatus(404);
response.setContentType("application/xml");
S3ListBucketRequest engineRequest = new S3ListBucketRequest();
engineRequest.setBucketName((String) request
.getAttribute(S3Constants.BUCKET_ATTR_KEY));
engineRequest.setDelimiter(request.getParameter("delimiter"));
engineRequest.setMarker(request.getParameter("marker"));
engineRequest.setPrefix(request.getParameter("prefix"));
StringBuffer xmlError = new StringBuffer();
xmlError.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")
int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000);
engineRequest.setMaxKeys(maxKeys);
try {
S3ListBucketResponse engineResponse = ServiceProvider.getInstance()
.getS3Engine().listBucketContents(engineRequest, false);
// To allow the all list buckets result to be serialized via Axiom
// classes
ListBucketResponse oneBucket = S3SerializableServiceImplementation
.toListBucketResponse(engineResponse);
OutputStream outputStream = response.getOutputStream();
response.setStatus(200);
response.setContentType("application/xml");
// The content-type literally should be
// "application/xml; charset=UTF-8"
// but any compliant JVM supplies utf-8 by default;
MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter(
"ListBucketResult", outputStream);
resultWriter.startWrite();
resultWriter.writeout(oneBucket);
resultWriter.stopWrite();
} catch (NoSuchObjectException nsoe) {
response.setStatus(404);
response.setContentType("application/xml");
StringBuffer xmlError = new StringBuffer();
xmlError.append("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")
.append("<Error><Code>NoSuchBucket</Code><Message>The specified bucket does not exist</Message>")
.append("<BucketName>").append((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY))
.append("<BucketName>")
.append((String) request
.getAttribute(S3Constants.BUCKET_ATTR_KEY))
.append("</BucketName>")
.append("<RequestId>1DEADBEEF9</RequestId>") //TODO
.append("<HostId>abCdeFgHiJ1k2LmN3op4q56r7st89</HostId>") //TODO
.append("<RequestId>1DEADBEEF9</RequestId>") // TODO
.append("<HostId>abCdeFgHiJ1k2LmN3op4q56r7st89</HostId>") // TODO
.append("</Error>");
S3RestServlet.endResponse(response, xmlError.toString());
S3RestServlet.endResponse(response, xmlError.toString());
}
}
}
@ -640,8 +657,7 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
return;
}
SBucketDao bucketDao = new SBucketDao();
SBucket sbucket = bucketDao.getByName( bucketName );
SBucketVO sbucket = bucketDao.getByName( bucketName );
if (sbucket == null) {
response.setStatus( 404 );
return;
@ -834,111 +850,124 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
public void executePutBucketAcl(HttpServletRequest request, HttpServletResponse response) throws IOException
{
// [A] Determine that there is an applicable bucket which might have an ACL set
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName( bucketName );
String owner = null;
if ( null != bucket )
owner = bucket.getOwnerCanonicalId();
if (null == owner)
{
logger.error( "ACL update failed since " + bucketName + " does not exist" );
throw new IOException("ACL update failed");
}
// [B] Obtain the grant request which applies to the acl request string. This latter is supplied as the value of the x-amz-acl header.
S3SetBucketAccessControlPolicyRequest engineRequest = new S3SetBucketAccessControlPolicyRequest();
S3Grant grantRequest = new S3Grant();
S3AccessControlList aclRequest = new S3AccessControlList();
String aclRequestString = request.getHeader("x-amz-acl");
OrderedPair <Integer,Integer> accessControlsForBucketOwner = SAcl.getCannedAccessControls(aclRequestString,"SBucket");
grantRequest.setPermission(accessControlsForBucketOwner.getFirst());
grantRequest.setGrantee(accessControlsForBucketOwner.getSecond());
grantRequest.setCanonicalUserID(owner);
aclRequest.addGrant(grantRequest);
engineRequest.setAcl(aclRequest);
engineRequest.setBucketName(bucketName);
// [C] Allow an S3Engine to handle the S3SetBucketAccessControlPolicyRequest
S3Response engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest);
response.setStatus( engineResponse.getResultCode());
String bucketName = (String) request
.getAttribute(S3Constants.BUCKET_ATTR_KEY);
SBucketVO bucket = bucketDao.getByName(bucketName);
String owner = null;
if (null != bucket)
owner = bucket.getOwnerCanonicalId();
if (null == owner) {
logger.error("ACL update failed since " + bucketName
+ " does not exist");
throw new IOException("ACL update failed");
}
// [B] Obtain the grant request which applies to the acl request string.
// This latter is supplied as the value of the x-amz-acl header.
S3SetBucketAccessControlPolicyRequest engineRequest = new S3SetBucketAccessControlPolicyRequest();
S3Grant grantRequest = new S3Grant();
S3AccessControlList aclRequest = new S3AccessControlList();
String aclRequestString = request.getHeader("x-amz-acl");
OrderedPair<Integer, Integer> accessControlsForBucketOwner = SAclVO.getCannedAccessControls(aclRequestString, "SBucket");
grantRequest.setPermission(accessControlsForBucketOwner.getFirst());
grantRequest.setGrantee(accessControlsForBucketOwner.getSecond());
grantRequest.setCanonicalUserID(owner);
aclRequest.addGrant(grantRequest);
engineRequest.setAcl(aclRequest);
engineRequest.setBucketName(bucketName);
// [C] Allow an S3Engine to handle the
// S3SetBucketAccessControlPolicyRequest
S3Response engineResponse = ServiceProvider.getInstance().getS3Engine()
.handleRequest(engineRequest);
response.setStatus(engineResponse.getResultCode());
}
public void executePutBucketVersioning(HttpServletRequest request, HttpServletResponse response) throws IOException
{
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
String versioningStatus = null;
Node item = null;
String bucketName = (String) request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
String versioningStatus = null;
Node item = null;
if (null == bucketName) {
logger.error( "executePutBucketVersioning - no bucket name given" );
response.setStatus( 400 );
return;
}
// -> is the XML as defined?
try {
DocumentBuilder db = dbf.newDocumentBuilder();
Document restXML = db.parse( request.getInputStream());
NodeList match = S3RestServlet.getElement( restXML, "http://s3.amazonaws.com/doc/2006-03-01/", "Status" );
if ( 0 < match.getLength())
{
item = match.item(0);
versioningStatus = new String( item.getFirstChild().getNodeValue());
}
else
{ logger.error( "executePutBucketVersioning - cannot find Status tag in XML body" );
response.setStatus( 400 );
return;
}
}
catch( Exception e ) {
logger.error( "executePutBucketVersioning - failed to parse XML due to " + e.getMessage(), e);
response.setStatus(400);
return;
}
try {
// Irrespective of what the ACLs say only the owner can turn on versioning on a bucket.
// The bucket owner may want to restrict the IP address from which this can occur.
SBucketDao bucketDao = new SBucketDao();
SBucket sbucket = bucketDao.getByName( bucketName );
String client = UserContext.current().getCanonicalUserId();
if (!client.equals( sbucket.getOwnerCanonicalId()))
throw new PermissionDeniedException( "Access Denied - only the owner can turn on versioing on a bucket" );
S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketVersioning, bucketName );
if (PolicyAccess.DENY == S3Engine.verifyPolicy( context )) {
response.setStatus(403);
return;
}
if (null == bucketName) {
logger.error("executePutBucketVersioning - no bucket name given");
response.setStatus(400);
return;
}
if (versioningStatus.equalsIgnoreCase( "Enabled" )) sbucket.setVersioningStatus( 1 );
else if (versioningStatus.equalsIgnoreCase( "Suspended")) sbucket.setVersioningStatus( 2 );
else {
logger.error( "executePutBucketVersioning - unknown state: [" + versioningStatus + "]" );
response.setStatus( 400 );
return;
}
bucketDao.update( sbucket );
} catch( PermissionDeniedException e ) {
logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e);
throw e;
} catch( Exception e ) {
logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e);
response.setStatus(500);
return;
}
response.setStatus(200);
// -> is the XML as defined?
try {
DocumentBuilder db = dbf.newDocumentBuilder();
Document restXML = db.parse(request.getInputStream());
NodeList match = S3RestServlet.getElement(restXML,
"http://s3.amazonaws.com/doc/2006-03-01/", "Status");
if (0 < match.getLength()) {
item = match.item(0);
versioningStatus = new String(item.getFirstChild()
.getNodeValue());
} else {
logger.error("executePutBucketVersioning - cannot find Status tag in XML body");
response.setStatus(400);
return;
}
} catch (Exception e) {
logger.error(
"executePutBucketVersioning - failed to parse XML due to "
+ e.getMessage(), e);
response.setStatus(400);
return;
}
try {
// Irrespective of what the ACLs say only the owner can turn on
// versioning on a bucket.
// The bucket owner may want to restrict the IP address from which
// this can occur.
SBucketVO sbucket = bucketDao.getByName(bucketName);
String client = UserContext.current().getCanonicalUserId();
if (!client.equals(sbucket.getOwnerCanonicalId()))
throw new PermissionDeniedException(
"Access Denied - only the owner can turn on versioing on a bucket");
S3PolicyContext context = new S3PolicyContext(
PolicyActions.PutBucketVersioning, bucketName);
if (PolicyAccess.DENY == S3Engine.verifyPolicy(context)) {
response.setStatus(403);
return;
}
if (versioningStatus.equalsIgnoreCase("Enabled"))
sbucket.setVersioningStatus(1);
else if (versioningStatus.equalsIgnoreCase("Suspended"))
sbucket.setVersioningStatus(2);
else {
logger.error("executePutBucketVersioning - unknown state: ["
+ versioningStatus + "]");
response.setStatus(400);
return;
}
bucketDao.update(sbucket.getId(), sbucket);
} catch (PermissionDeniedException e) {
logger.error(
"executePutBucketVersioning - failed due to "
+ e.getMessage(), e);
throw e;
} catch (Exception e) {
logger.error(
"executePutBucketVersioning - failed due to "
+ e.getMessage(), e);
response.setStatus(500);
return;
}
response.setStatus(200);
}
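executePutBucketVersioning only looks for a Status element in the 2006-03-01 S3 namespace and maps Enabled to versioning status 1 and Suspended to 2. A hedged sketch of parsing such a body with plain namespace-aware DOM; the VersioningConfiguration wrapper is an assumption based on the S3 API, and the handler itself goes through S3RestServlet.getElement rather than this direct lookup:

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.NodeList;

    public class VersioningBodySketch {
        public static void main(String[] args) throws Exception {
            // Illustrative request body; only the Status element/namespace come from the diff.
            String body = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
                    + "<VersioningConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">"
                    + "<Status>Enabled</Status></VersioningConfiguration>";
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setNamespaceAware(true);
            Document doc = dbf.newDocumentBuilder()
                    .parse(new ByteArrayInputStream(body.getBytes("UTF-8")));
            NodeList match = doc.getElementsByTagNameNS(
                    "http://s3.amazonaws.com/doc/2006-03-01/", "Status");
            // "Enabled" -> versioningStatus 1, "Suspended" -> versioningStatus 2 in the handler.
            System.out.println(match.item(0).getFirstChild().getNodeValue());
        }
    }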
public void executePutBucketLogging(HttpServletRequest request, HttpServletResponse response) throws IOException {
@ -949,7 +978,7 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
public void executePutBucketWebsite(HttpServletRequest request, HttpServletResponse response) throws IOException {
// TODO -- LoPri - Undertake checks on Put Bucket Website
// Tested using configuration <Directory /Users/john1/S3-Mount>\nAllowOverride FileInfo AuthConfig Limit...</Directory> in httpd.conf
// Need some way of using AllowOverride to allow use of .htaccess and then pushing the .htaccess file to the bucket subdirectory of the mount point
// Need some way of using AllowOverride to allow use of .htaccess and then pushing the .htaccess file to the bucket subdirectory of the mount point
// Currently has noop effect in the sense that a running apachectl process sees the directory contents without further action
response.setStatus(200);
}
@ -976,128 +1005,145 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes
public void executeListMultipartUploads(HttpServletRequest request, HttpServletResponse response) throws IOException
{
// [A] Obtain parameters and do basic bucket verification
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
String delimiter = request.getParameter("delimiter");
String keyMarker = request.getParameter("key-marker");
String prefix = request.getParameter("prefix");
int maxUploads = 1000;
int nextUploadId = 0;
String nextKey = null;
boolean isTruncated = false;
S3MultipartUpload[] uploads = null;
S3MultipartUpload onePart = null;
String temp = request.getParameter("max-uploads");
if (null != temp) {
maxUploads = Integer.parseInt( temp );
if (maxUploads > 1000 || maxUploads < 0) maxUploads = 1000;
}
// -> upload-id-marker is ignored unless key-marker is also specified
String uploadIdMarker = request.getParameter("upload-id-marker");
if (null == keyMarker) uploadIdMarker = null;
// -> does the bucket exist, we may need it to verify access permissions
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName(bucketName);
if (bucket == null) {
logger.error( "listMultipartUpload failed since " + bucketName + " does not exist" );
response.setStatus(404);
return;
String bucketName = (String) request
.getAttribute(S3Constants.BUCKET_ATTR_KEY);
String delimiter = request.getParameter("delimiter");
String keyMarker = request.getParameter("key-marker");
String prefix = request.getParameter("prefix");
int maxUploads = 1000;
int nextUploadId = 0;
String nextKey = null;
boolean isTruncated = false;
S3MultipartUpload[] uploads = null;
S3MultipartUpload onePart = null;
String temp = request.getParameter("max-uploads");
if (null != temp) {
maxUploads = Integer.parseInt(temp);
if (maxUploads > 1000 || maxUploads < 0)
maxUploads = 1000;
}
// -> upload-id-marker is ignored unless key-marker is also specified
String uploadIdMarker = request.getParameter("upload-id-marker");
if (null == keyMarker)
uploadIdMarker = null;
// -> does the bucket exist, we may need it to verify access permissions
SBucketVO bucket = bucketDao.getByName(bucketName);
if (bucket == null) {
logger.error("listMultipartUpload failed since " + bucketName
+ " does not exist");
response.setStatus(404);
return;
}
S3PolicyContext context = new S3PolicyContext(
PolicyActions.ListBucketMultipartUploads, bucketName);
context.setEvalParam(ConditionKeys.Prefix, prefix);
context.setEvalParam(ConditionKeys.Delimiter, delimiter);
S3Engine.verifyAccess(context, "SBucket", bucket.getId(),
SAcl.PERMISSION_READ);
// [B] Query the multipart table to get the list of current uploads
try {
MultipartLoadDao uploadDao = new MultipartLoadDao();
OrderedPair<S3MultipartUpload[], Boolean> result = uploadDao
.getInitiatedUploads(bucketName, maxUploads, prefix,
keyMarker, uploadIdMarker);
uploads = result.getFirst();
isTruncated = result.getSecond().booleanValue();
} catch (Exception e) {
logger.error(
"List Multipart Uploads failed due to " + e.getMessage(), e);
response.setStatus(500);
}
StringBuffer xml = new StringBuffer();
xml.append("<?xml version=\"1.0\" encoding=\"utf-8\"?>");
xml.append("<ListMultipartUploadsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">");
xml.append("<Bucket>").append(bucketName).append("</Bucket>");
xml.append("<KeyMarker>").append((null == keyMarker ? "" : keyMarker))
.append("</KeyMarker>");
xml.append("<UploadIdMarker>")
.append((null == uploadIdMarker ? "" : uploadIdMarker))
.append("</UploadIdMarker>");
// [C] Construct the contents of the <Upload> element
StringBuffer partsList = new StringBuffer();
for (int i = 0; i < uploads.length; i++) {
onePart = uploads[i];
if (null == onePart)
break;
if (delimiter != null && !delimiter.isEmpty()) {
// -> is this available only in the CommonPrefixes element?
if (StringHelper.substringInBetween(onePart.getKey(), prefix,
delimiter) != null)
continue;
}
nextKey = onePart.getKey();
nextUploadId = onePart.getId();
partsList.append("<Upload>");
partsList.append("<Key>").append(nextKey).append("</Key>");
partsList.append("<UploadId>").append(nextUploadId)
.append("</UploadId>");
partsList.append("<Initiator>");
partsList.append("<ID>").append(onePart.getAccessKey())
.append("</ID>");
partsList.append("<DisplayName></DisplayName>");
partsList.append("</Initiator>");
partsList.append("<Owner>");
partsList.append("<ID>").append(onePart.getAccessKey())
.append("</ID>");
partsList.append("<DisplayName></DisplayName>");
partsList.append("</Owner>");
partsList.append("<StorageClass>STANDARD</StorageClass>");
partsList
.append("<Initiated>")
.append(DatatypeConverter.printDateTime(onePart
.getLastModified())).append("</Initiated>");
partsList.append("</Upload>");
}
// [D] Construct the contents of the <CommonPrefixes> elements (if any)
for (int i = 0; i < uploads.length; i++) {
onePart = uploads[i];
if (null == onePart)
break;
if (delimiter != null && !delimiter.isEmpty()) {
String subName = StringHelper.substringInBetween(
onePart.getKey(), prefix, delimiter);
if (subName != null) {
partsList.append("<CommonPrefixes>");
partsList.append("<Prefix>");
if (prefix != null && prefix.length() > 0)
partsList.append(prefix + delimiter + subName);
else
partsList.append(subName);
partsList.append("</Prefix>");
partsList.append("</CommonPrefixes>");
}
S3PolicyContext context = new S3PolicyContext( PolicyActions.ListBucketMultipartUploads, bucketName );
context.setEvalParam( ConditionKeys.Prefix, prefix );
context.setEvalParam( ConditionKeys.Delimiter, delimiter );
S3Engine.verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_READ );
}
}
// [B] Query the multipart table to get the list of current uploads
try {
MultipartLoadDao uploadDao = new MultipartLoadDao();
OrderedPair<S3MultipartUpload[],Boolean> result = uploadDao.getInitiatedUploads( bucketName, maxUploads, prefix, keyMarker, uploadIdMarker );
uploads = result.getFirst();
isTruncated = result.getSecond().booleanValue();
}
catch( Exception e ) {
logger.error("List Multipart Uploads failed due to " + e.getMessage(), e);
response.setStatus(500);
}
// [D] Finish off the response
xml.append("<NextKeyMarker>").append((null == nextKey ? "" : nextKey))
.append("</NextKeyMarker>");
xml.append("<NextUploadIdMarker>")
.append((0 == nextUploadId ? "" : nextUploadId))
.append("</NextUploadIdMarker>");
xml.append("<MaxUploads>").append(maxUploads).append("</MaxUploads>");
xml.append("<IsTruncated>").append(isTruncated)
.append("</IsTruncated>");
StringBuffer xml = new StringBuffer();
xml.append( "<?xml version=\"1.0\" encoding=\"utf-8\"?>" );
xml.append( "<ListMultipartUploadsResult xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">" );
xml.append( "<Bucket>" ).append( bucketName ).append( "</Bucket>" );
xml.append( "<KeyMarker>").append((null == keyMarker ? "" : keyMarker)).append( "</KeyMarker>" );
xml.append( "<UploadIdMarker>").append((null == uploadIdMarker ? "" : uploadIdMarker)).append( "</UploadIdMarker>" );
// [C] Construct the contents of the <Upload> element
StringBuffer partsList = new StringBuffer();
for( int i=0; i < uploads.length; i++ )
{
onePart = uploads[i];
if (null == onePart) break;
if (delimiter != null && !delimiter.isEmpty())
{
// -> is this available only in the CommonPrefixes element?
if (StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter) != null)
continue;
}
nextKey = onePart.getKey();
nextUploadId = onePart.getId();
partsList.append( "<Upload>" );
partsList.append( "<Key>" ).append( nextKey ).append( "</Key>" );
partsList.append( "<UploadId>" ).append( nextUploadId ).append( "</UploadId>" );
partsList.append( "<Initiator>" );
partsList.append( "<ID>" ).append( onePart.getAccessKey()).append( "</ID>" );
partsList.append( "<DisplayName></DisplayName>" );
partsList.append( "</Initiator>" );
partsList.append( "<Owner>" );
partsList.append( "<ID>" ).append( onePart.getAccessKey()).append( "</ID>" );
partsList.append( "<DisplayName></DisplayName>" );
partsList.append( "</Owner>" );
partsList.append( "<StorageClass>STANDARD</StorageClass>" );
partsList.append( "<Initiated>" ).append( DatatypeConverter.printDateTime( onePart.getLastModified())).append( "</Initiated>" );
partsList.append( "</Upload>" );
}
// [D] Construct the contents of the <CommonPrefixes> elements (if any)
for( int i=0; i < uploads.length; i++ )
{
onePart = uploads[i];
if (null == onePart) break;
xml.append(partsList.toString());
xml.append("</ListMultipartUploadsResult>");
if (delimiter != null && !delimiter.isEmpty())
{
String subName = StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter);
if (subName != null)
{
partsList.append( "<CommonPrefixes>" );
partsList.append( "<Prefix>" );
if ( prefix != null && prefix.length() > 0 )
partsList.append( prefix + delimiter + subName );
else partsList.append( subName );
partsList.append( "</Prefix>" );
partsList.append( "</CommonPrefixes>" );
}
}
}
// [D] Finish off the response
xml.append( "<NextKeyMarker>" ).append((null == nextKey ? "" : nextKey)).append( "</NextKeyMarker>" );
xml.append( "<NextUploadIdMarker>" ).append((0 == nextUploadId ? "" : nextUploadId)).append( "</NextUploadIdMarker>" );
xml.append( "<MaxUploads>" ).append( maxUploads ).append( "</MaxUploads>" );
xml.append( "<IsTruncated>" ).append( isTruncated ).append( "</IsTruncated>" );
xml.append( partsList.toString());
xml.append( "</ListMultipartUploadsResult>" );
response.setStatus(200);
response.setContentType("text/xml; charset=UTF-8");
S3RestServlet.endResponse(response, xml.toString());
response.setStatus(200);
response.setContentType("text/xml; charset=UTF-8");
S3RestServlet.endResponse(response, xml.toString());
}
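Two request-parameter rules are buried at the top of executeListMultipartUploads: max-uploads defaults to 1000 and is clamped back to 1000 when out of range, and upload-id-marker is honoured only when key-marker is also supplied. A small runnable restatement (method names are illustrative; the rules themselves come from the hunk above):

    public class ListUploadsParamSketch {
        static int parseMaxUploads(String maxUploadsParam) {
            int maxUploads = 1000;
            if (maxUploadsParam != null) {
                maxUploads = Integer.parseInt(maxUploadsParam);
                if (maxUploads > 1000 || maxUploads < 0) maxUploads = 1000;   // clamp
            }
            return maxUploads;
        }

        static String effectiveUploadIdMarker(String keyMarker, String uploadIdMarker) {
            return (keyMarker == null) ? null : uploadIdMarker;   // ignored without key-marker
        }

        public static void main(String[] args) {
            System.out.println(parseMaxUploads("50"));               // 50
            System.out.println(parseMaxUploads("5000"));             // 1000
            System.out.println(effectiveUploadIdMarker(null, "x"));  // null
        }
    }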
private String streamToString( InputStream is ) throws IOException


@ -45,9 +45,12 @@ import com.amazon.s3.CopyObjectResponse;
import com.amazon.s3.GetObjectAccessControlPolicyResponse;
import com.cloud.bridge.io.MTOMAwareResultStreamWriter;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.model.SAclVO;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.model.SBucketVO;
import com.cloud.bridge.persist.dao.MultipartLoadDao;
import com.cloud.bridge.persist.dao.SBucketDao;
import com.cloud.bridge.persist.dao.SBucketDaoImpl;
import com.cloud.bridge.service.S3Constants;
import com.cloud.bridge.service.S3RestServlet;
import com.cloud.bridge.service.UserContext;
@ -79,9 +82,11 @@ import com.cloud.bridge.util.DateHelper;
import com.cloud.bridge.util.HeaderParam;
import com.cloud.bridge.util.ServletRequestDataSource;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.utils.component.ComponentLocator;
public class S3ObjectAction implements ServletAction {
protected final static Logger logger = Logger.getLogger(S3ObjectAction.class);
protected final SBucketDao bucketDao = ComponentLocator.inject(SBucketDaoImpl.class);
private DocumentBuilderFactory dbf = null;
@ -273,8 +278,7 @@ public class S3ObjectAction implements ServletAction {
String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY);
String key = (String)request.getAttribute(S3Constants.OBJECT_ATTR_KEY);
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName( bucketName );
SBucketVO bucket = bucketDao.getByName( bucketName );
String owner = null;
if ( null != bucket )
owner = bucket.getOwnerCanonicalId();
@ -296,7 +300,7 @@ public class S3ObjectAction implements ServletAction {
S3AccessControlList aclRequest = new S3AccessControlList();
String aclRequestString = request.getHeader("x-amz-acl");
OrderedPair <Integer,Integer> accessControlsForObjectOwner = SAcl.getCannedAccessControls(aclRequestString,"SObject");
OrderedPair <Integer,Integer> accessControlsForObjectOwner = SAclVO.getCannedAccessControls(aclRequestString,"SObject");
grantRequest.setPermission(accessControlsForObjectOwner.getFirst());
grantRequest.setGrantee(accessControlsForObjectOwner.getSecond());
grantRequest.setCanonicalUserID(owner);
@ -484,6 +488,11 @@ public class S3ObjectAction implements ServletAction {
S3GetObjectResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest( engineRequest );
response.setStatus( engineResponse.getResultCode());
// bucket lookup for a non-existent key
if ( engineResponse.getResultCode() == 404 )
return;
String deleteMarker = engineResponse.getDeleteMarker();
if ( null != deleteMarker ) {
@ -891,8 +900,7 @@ public class S3ObjectAction implements ServletAction {
// -> does the bucket exist, we may need it to verify access permissions
SBucketDao bucketDao = new SBucketDao();
SBucket bucket = bucketDao.getByName(bucketName);
SBucketVO bucket = bucketDao.getByName(bucketName);
if (bucket == null) {
logger.error( "listUploadParts failed since " + bucketName + " does not exist" );
response.setStatus(404);


@ -35,18 +35,19 @@ import java.util.TimerTask;
import org.apache.axis2.AxisFault;
import org.apache.log4j.Logger;
import org.apache.log4j.xml.DOMConfigurator;
import org.hibernate.SessionException;
import com.amazon.s3.AmazonS3SkeletonInterface;
import com.amazon.ec2.AmazonEC2SkeletonInterface;
import com.cloud.bridge.model.MHost;
import com.cloud.bridge.model.MHostVO;
import com.cloud.bridge.model.SHost;
import com.cloud.bridge.model.UserCredentials;
import com.cloud.bridge.persist.PersistContext;
import com.cloud.bridge.persist.PersistException;
import com.cloud.bridge.model.SHostVO;
import com.cloud.bridge.model.UserCredentialsVO;
import com.cloud.bridge.persist.dao.MHostDao;
import com.cloud.bridge.persist.dao.MHostDaoImpl;
import com.cloud.bridge.persist.dao.SHostDao;
import com.cloud.bridge.persist.dao.SHostDaoImpl;
import com.cloud.bridge.persist.dao.UserCredentialsDao;
import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl;
import com.cloud.bridge.service.EC2SoapServiceImpl;
import com.cloud.bridge.service.UserInfo;
import com.cloud.bridge.service.core.ec2.EC2Engine;
@ -57,17 +58,23 @@ import com.cloud.bridge.util.ConfigurationHelper;
import com.cloud.bridge.util.DateHelper;
import com.cloud.bridge.util.NetHelper;
import com.cloud.bridge.util.OrderedPair;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.DB;
import com.cloud.utils.db.Transaction;
public class ServiceProvider {
protected final static Logger logger = Logger.getLogger(ServiceProvider.class);
protected final MHostDao mhostDao = ComponentLocator.inject(MHostDaoImpl.class);
protected final SHostDao shostDao = ComponentLocator.inject(SHostDaoImpl.class);
protected final UserCredentialsDao ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class);
public final static long HEARTBEAT_INTERVAL = 10000;
private static ServiceProvider instance;
private Map<Class<?>, Object> serviceMap = new HashMap<Class<?>, Object>();
private Timer timer = new Timer();
private MHost mhost;
private MHostVO mhost;
private Properties properties;
private boolean useSubDomain = false; // use DNS sub domain for bucket name
private String serviceEndpoint = null;
@ -81,6 +88,8 @@ public class ServiceProvider {
protected ServiceProvider() throws IOException {
// register service implementation object
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
txn.close();
engine = new S3Engine();
EC2_engine = new EC2Engine();
serviceMap.put(AmazonS3SkeletonInterface.class, new S3SerializableServiceImplementation(engine));
@ -93,11 +102,9 @@ public class ServiceProvider {
try {
instance = new ServiceProvider();
instance.initialize();
PersistContext.commitTransaction();
} catch(Throwable e) {
logger.error("Unexpected exception " + e.getMessage(), e);
} finally {
PersistContext.closeSession();
}
}
return instance;
@ -172,27 +179,34 @@ public class ServiceProvider {
return properties;
}
public UserInfo getUserInfo(String accessKey)
throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException {
public UserInfo getUserInfo(String accessKey) {
UserInfo info = new UserInfo();
UserCredentialsDao credentialDao = new UserCredentialsDao();
UserCredentials cloudKeys = credentialDao.getByAccessKey( accessKey );
if ( null == cloudKeys ) {
logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" );
return null;
} else {
info.setAccessKey( accessKey );
info.setSecretKey( cloudKeys.getSecretKey());
info.setCanonicalUserId(accessKey);
info.setDescription( "S3 REST request" );
return info;
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
try {
txn.start();
UserCredentialsVO cloudKeys = ucDao.getByAccessKey( accessKey );
if ( null == cloudKeys ) {
logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" );
return null;
} else {
info.setAccessKey( accessKey );
info.setSecretKey( cloudKeys.getSecretKey());
info.setCanonicalUserId(accessKey);
info.setDescription( "S3 REST request" );
return info;
}
}finally {
txn.commit();
}
}
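getUserInfo now opens and commits its own AWS-API transaction and drops the checked exceptions, so callers reduce to a plain null check. A hedged usage sketch: ServiceProvider.getInstance() and the null contract come from the diff, while UserInfo.getSecretKey() is an assumed accessor mirroring the setSecretKey() call above.

    // Illustrative caller of the simplified getUserInfo().
    static boolean hasRegisteredKeys(String accessKey) {
        UserInfo info = ServiceProvider.getInstance().getUserInfo(accessKey);
        if (info == null) {
            // Access key unknown to the S3 service; SetUserKeys must be called first.
            return false;
        }
        return info.getSecretKey() != null;   // assumed getter, used to verify request signatures
    }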
@DB
protected void initialize() {
if(logger.isInfoEnabled())
logger.info("Initializing ServiceProvider...");
Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
//txn.close();
File file = ConfigurationHelper.findConfigurationFile("log4j-cloud.xml");
if(file != null) {
@ -226,14 +240,16 @@ public class ServiceProvider {
setupHost(hostKey, host);
// we will commit and start a new transaction to allow host info be flushed to DB
PersistContext.flush();
//PersistContext.flush();
String localStorageRoot = properties.getProperty("storage.root");
if (localStorageRoot != null) setupLocalStorage(localStorageRoot);
multipartDir = properties.getProperty("storage.multipartDir");
Transaction txn1 = Transaction.open(Transaction.AWSAPI_DB);
timer.schedule(getHeartbeatTask(), HEARTBEAT_INTERVAL, HEARTBEAT_INTERVAL);
txn1.close();
if(logger.isInfoEnabled())
logger.info("ServiceProvider initialized");
@ -264,45 +280,41 @@ public class ServiceProvider {
@Override
public void run() {
try {
MHostDao mhostDao = new MHostDao();
mhost.setLastHeartbeatTime(DateHelper.currentGMTTime());
mhostDao.update(mhost);
PersistContext.commitTransaction();
mhostDao.updateHeartBeat(mhost);
} catch(Throwable e){
logger.error("Unexpected exception " + e.getMessage(), e);
} finally {
PersistContext.closeSession();
}
}
};
}
private void setupHost(String hostKey, String host) {
MHostDao mhostDao = new MHostDao();
mhost = mhostDao.getByHostKey(hostKey);
mhost = mhostDao.getByHostKey(hostKey);
if(mhost == null) {
mhost = new MHost();
mhost = new MHostVO();
mhost.setHostKey(hostKey);
mhost.setHost(host);
mhost.setLastHeartbeatTime(DateHelper.currentGMTTime());
mhostDao.save(mhost);
mhost = mhostDao.persist(mhost);
} else {
mhost.setHost(host);
mhostDao.update(mhost);
mhostDao.update(mhost.getId(), mhost);
}
}
private void setupLocalStorage(String storageRoot) {
SHostDao shostDao = new SHostDao();
SHost shost = shostDao.getLocalStorageHost(mhost.getId(), storageRoot);
SHostVO shost = shostDao.getLocalStorageHost(mhost.getId(), storageRoot);
if(shost == null) {
shost = new SHost();
shost = new SHostVO();
shost.setMhost(mhost);
mhost.getLocalSHosts().add(shost);
shost.setMhostid(mhost.getId());
shost.setHostType(SHost.STORAGE_HOST_TYPE_LOCAL);
shost.setHost(NetHelper.getHostName());
shost.setExportRoot(storageRoot);
PersistContext.getSession().save(shost);
shostDao.persist(shost);
}
}
@ -318,35 +330,36 @@ public class ServiceProvider {
return (T) Proxy.newProxyInstance(serviceObject.getClass().getClassLoader(),
new Class[] { serviceInterface },
new InvocationHandler() {
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
Object result = null;
try {
result = method.invoke(serviceObject, args);
PersistContext.commitTransaction();
PersistContext.commitTransaction(true);
} catch (PersistException e) {
} catch (SessionException e) {
} catch(Throwable e) {
// Rethrow the exception to Axis:
// Check if the exception is an AxisFault or a RuntimeException
// enveloped AxisFault and if so, pass it on as such. Otherwise
// log to help debugging and throw as is.
if (e.getCause() != null && e.getCause() instanceof AxisFault)
throw e.getCause();
else if (e.getCause() != null && e.getCause().getCause() != null
&& e.getCause().getCause() instanceof AxisFault)
throw e.getCause().getCause();
else {
logger.warn("Unhandled exception " + e.getMessage(), e);
throw e;
}
} finally {
PersistContext.closeSession();
PersistContext.closeSession(true);
}
return result;
}
});
public Object invoke(Object proxy, Method method,
Object[] args) throws Throwable {
Object result = null;
try {
result = method.invoke(serviceObject, args);
} catch (Throwable e) {
// Rethrow the exception to Axis:
// Check if the exception is an AxisFault or a
// RuntimeException
// enveloped AxisFault and if so, pass it on as
// such. Otherwise
// log to help debugging and throw as is.
if (e.getCause() != null
&& e.getCause() instanceof AxisFault)
throw e.getCause();
else if (e.getCause() != null
&& e.getCause().getCause() != null
&& e.getCause().getCause() instanceof AxisFault)
throw e.getCause().getCause();
else {
logger.warn(
"Unhandled exception " + e.getMessage(),
e);
throw e;
}
} finally {
}
return result;
}
});
}
@SuppressWarnings("unchecked")


@ -22,6 +22,9 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.security.SignatureException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.text.ParseException;
import java.util.ArrayList;
@ -32,12 +35,15 @@ import java.util.UUID;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.log4j.Logger;
import org.hibernate.ejb.criteria.expression.UnaryArithmeticOperation.Operation;
import org.xml.sax.SAXException;
import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDao;
import com.cloud.bridge.model.CloudStackServiceOfferingVO;
import com.cloud.bridge.persist.dao.CloudStackAccountDao;
import com.cloud.bridge.persist.dao.OfferingDao;
import com.cloud.bridge.persist.dao.CloudStackAccountDaoImpl;
import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDao;
import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDaoImpl;
import com.cloud.bridge.persist.dao.OfferingDaoImpl;
import com.cloud.bridge.persist.dao.SObjectItemDaoImpl;
import com.cloud.bridge.service.UserContext;
import com.cloud.bridge.service.core.ec2.EC2ImageAttributes.ImageAttribute;
@ -70,6 +76,8 @@ import com.cloud.stack.models.CloudStackUser;
import com.cloud.stack.models.CloudStackUserVm;
import com.cloud.stack.models.CloudStackVolume;
import com.cloud.stack.models.CloudStackZone;
import com.cloud.utils.component.ComponentLocator;
import com.cloud.utils.db.Transaction;
/**
* EC2Engine processes the ec2 commands and calls their cloudstack analogs
@ -80,6 +88,9 @@ public class EC2Engine {
String managementServer = null;
String cloudAPIPort = null;
protected final CloudStackSvcOfferingDao scvoDao = ComponentLocator.inject(CloudStackSvcOfferingDaoImpl.class);
protected final OfferingDaoImpl ofDao = ComponentLocator.inject(OfferingDaoImpl.class);
CloudStackAccountDao accDao = ComponentLocator.inject(CloudStackAccountDaoImpl.class);
private CloudStackApi _eng = null;
private CloudStackAccount currentAccount = null;
@ -110,7 +121,6 @@ public class EC2Engine {
managementServer = EC2Prop.getProperty( "managementServer" );
cloudAPIPort = EC2Prop.getProperty( "cloudAPIPort", null );
OfferingDao ofDao = new OfferingDao();
try {
if(ofDao.getOfferingCount() == 0) {
String strValue = EC2Prop.getProperty("m1.small.serviceId");
@ -1469,7 +1479,7 @@ public class EC2Engine {
if(request.getInstanceType() != null){
instanceType = request.getInstanceType();
}
CloudStackServiceOffering svcOffering = getCSServiceOfferingId(instanceType);
CloudStackServiceOfferingVO svcOffering = getCSServiceOfferingId(instanceType);
if(svcOffering == null){
logger.info("No ServiceOffering found to be defined by name, please contact the administrator "+instanceType );
throw new EC2ServiceException(ClientError.Unsupported, "instanceType: [" + instanceType + "] not found!");
@ -1521,6 +1531,7 @@ public class EC2Engine {
vm.setDomainId(resp.getDomainId());
vm.setHypervisor(resp.getHypervisor());
vm.setServiceOffering( svcOffering.getName());
vm.setKeyPairName(resp.getKeyPairName());
instances.addInstance(vm);
countCreated++;
}
@ -1778,12 +1789,11 @@ public class EC2Engine {
*
*/
private CloudStackServiceOffering getCSServiceOfferingId(String instanceType){
private CloudStackServiceOfferingVO getCSServiceOfferingId(String instanceType){
try {
if (null == instanceType) instanceType = "m1.small";
if (null == instanceType) instanceType = "m1.small";
CloudStackSvcOfferingDao dao = new CloudStackSvcOfferingDao();
return dao.getSvcOfferingByName(instanceType);
return scvoDao.getSvcOfferingByName(instanceType);
} catch(Exception e) {
logger.error( "Error while retrieving ServiceOffering information by name - ", e);
@ -1801,8 +1811,8 @@ public class EC2Engine {
*/
private String serviceOfferingIdToInstanceType( String serviceOfferingId ){
try{
CloudStackSvcOfferingDao dao = new CloudStackSvcOfferingDao();
CloudStackServiceOffering offering = dao.getSvcOfferingById(serviceOfferingId);
CloudStackServiceOfferingVO offering = scvoDao.getSvcOfferingById(serviceOfferingId); //dao.getSvcOfferingById(serviceOfferingId);
if(offering == null){
logger.warn( "No instanceType match for serviceOfferingId: [" + serviceOfferingId + "]" );
return "m1.small";
@ -1905,7 +1915,8 @@ public class EC2Engine {
ec2Vm.setRootDeviceType(cloudVm.getRootDeviceType());
ec2Vm.setRootDeviceId(cloudVm.getRootDeviceId());
ec2Vm.setServiceOffering(serviceOfferingIdToInstanceType(cloudVm.getServiceOfferingId().toString()));
ec2Vm.setKeyPairName(cloudVm.getKeyPairName());
List<CloudStackNic> nics = cloudVm.getNics();
for(CloudStackNic nic : nics) {
if(nic.getIsDefault()) {
@ -2258,9 +2269,7 @@ public class EC2Engine {
*/
private String getDefaultZoneId(String accountId) {
try {
CloudStackAccountDao dao = new CloudStackAccountDao();
CloudStackAccount account = dao.getdefaultZoneId(accountId);
return account.getDefaultZoneId();
return accDao.getDefaultZoneId(accountId);
} catch(Exception e) {
logger.error( "Error while retrieving Account information by id - ", e);
throw new EC2ServiceException(ServerError.InternalError, e.getMessage());


@ -40,6 +40,7 @@ public class EC2Instance {
private String hypervisor;
private String rootDeviceType;
private String rootDeviceId;
private String keyPairName;
private List<String> groupSet;
private List<EC2TagKeyValue> tagsSet;
@ -60,6 +61,7 @@ public class EC2Instance {
hypervisor = null;
rootDeviceType = null;
rootDeviceId = null;
keyPairName = null;
groupSet = new ArrayList<String>();
tagsSet = new ArrayList<EC2TagKeyValue>();
}
@ -192,6 +194,14 @@ public class EC2Instance {
rootDeviceId = param;
}
public String getKeyPairName() {
return keyPairName;
}
public void setKeyPairName(String param) {
keyPairName = param;
}
public void addGroupName( String param ) {
groupSet.add( param );
}
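EC2Instance gains a keyPairName property here, filled in EC2Engine from the CloudStack response via setKeyPairName(resp.getKeyPairName()) and cloudVm.getKeyPairName(). A trivial hedged usage sketch; the accessors come from the hunk above and the value is invented:

    public class KeyPairNameSketch {
        public static void main(String[] args) {
            EC2Instance vm = new EC2Instance();
            vm.setKeyPairName("my-launch-keypair");    // normally copied from the CloudStack VM
            System.out.println(vm.getKeyPairName());   // my-launch-keypair
        }
    }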

File diff suppressed because it is too large


@ -19,6 +19,7 @@ package com.cloud.bridge.service.core.s3;
import java.util.List;
import com.cloud.bridge.model.SAcl;
import com.cloud.bridge.model.SAclVO;
import com.cloud.bridge.model.SBucket;
import com.cloud.bridge.service.exception.UnsupportedException;
@ -64,12 +65,12 @@ public class S3Grant {
/* Return an array of S3Grants holding the permissions of grantees by grantee type and their canonicalUserIds.
* Used by S3 engine to get ACL policy requests for buckets and objects.
*/
public static S3Grant[] toGrants(List<SAcl> grants) {
public static S3Grant[] toGrants(List<SAclVO> grants) {
if(grants != null)
{
S3Grant[] entries = new S3Grant[grants.size()];
int i = 0;
for(SAcl acl: grants) {
for(SAclVO acl: grants) {
entries[i] = new S3Grant();
entries[i].setGrantee(acl.getGranteeType());
entries[i].setCanonicalUserID(acl.getGranteeCanonicalId());


@ -78,6 +78,7 @@ public class EC2ServiceException extends RuntimeException {
InvalidPermission_Malformed("Client.InvalidPermission.Malformed", 400),
InvalidReservationID_Malformed("Client.InvalidReservationID.Malformed", 400),
InvalidReservationID_NotFound("Client.InvalidReservationID.NotFound", 400),
InvalidResourceId_Format("Client.InvalidResourceId.Format", 400),
InvalidSnapshotID_Malformed("Client.InvalidSnapshotID.Malformed", 400),
InvalidSnapshot_NotFound("Client.InvalidSnapshot.NotFound", 400),
InvalidUserID_Malformed("Client.InvalidUserID.Malformed", 400),


@ -1,106 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Properties;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.jasypt.properties.EncryptableProperties;
import org.apache.log4j.Logger;
public class CloudSessionFactory {
private static CloudSessionFactory instance;
public static final Logger logger = Logger.getLogger(CloudSessionFactory.class);
private SessionFactory factory;
private CloudSessionFactory() {
Configuration cfg = new Configuration();
File file = ConfigurationHelper.findConfigurationFile("hibernate.cfg.xml");
File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties");
Properties dbProp = null;
String dbName = null;
String dbHost = null;
String dbUser = null;
String dbPassword = null;
String dbPort = null;
if (null != propertiesFile) {
if(EncryptionSecretKeyCheckerUtil.useEncryption()){
StandardPBEStringEncryptor encryptor = EncryptionSecretKeyCheckerUtil.getEncryptor();
dbProp = new EncryptableProperties(encryptor);
} else {
dbProp = new Properties();
}
try {
dbProp.load( new FileInputStream( propertiesFile ));
} catch (FileNotFoundException e) {
logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e);
} catch (IOException e) {
logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e);
}
}
//
// we are packaging hibernate mapping files along with the class files,
// make sure class loader use the same class path when initializing hibernate mapping.
// This is important when we are deploying and testing at different environment (Tomcat/JUnit test runner)
//
if(file != null && dbProp != null){
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
cfg.configure(file);
dbHost = dbProp.getProperty( "db.cloud.host" );
dbName = dbProp.getProperty( "db.awsapi.name" );
dbUser = dbProp.getProperty( "db.cloud.username" );
dbPassword = dbProp.getProperty( "db.cloud.password" );
dbPort = dbProp.getProperty( "db.cloud.port" );
cfg.setProperty("hibernate.connection.url", "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName);
cfg.setProperty("hibernate.connection.username", dbUser);
cfg.setProperty("hibernate.connection.password", dbPassword);
factory = cfg.buildSessionFactory();
}else{
logger.warn("Unable to open load db configuration");
throw new RuntimeException("nable to open load db configuration");
}
}
public synchronized static CloudSessionFactory getInstance() {
if(instance == null) {
instance = new CloudSessionFactory();
}
return instance;
}
public Session openSession() {
return factory.openSession();
}
}


@ -1,106 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.util;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Properties;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;
import org.jasypt.encryption.pbe.StandardPBEStringEncryptor;
import org.jasypt.properties.EncryptableProperties;
import org.apache.log4j.Logger;
public class CloudStackSessionFactory {
private static CloudStackSessionFactory instance;
public static final Logger logger = Logger.getLogger(CloudStackSessionFactory.class);
private SessionFactory factory;
private CloudStackSessionFactory() {
Configuration cfg = new Configuration();
File file = ConfigurationHelper.findConfigurationFile("CloudStack.cfg.xml");
File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties");
Properties dbProp = null;
String dbName = null;
String dbHost = null;
String dbUser = null;
String dbPassword = null;
String dbPort = null;
if (null != propertiesFile) {
if(EncryptionSecretKeyCheckerUtil.useEncryption()){
StandardPBEStringEncryptor encryptor = EncryptionSecretKeyCheckerUtil.getEncryptor();
dbProp = new EncryptableProperties(encryptor);
} else {
dbProp = new Properties();
}
try {
dbProp.load( new FileInputStream( propertiesFile ));
} catch (FileNotFoundException e) {
logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e);
} catch (IOException e) {
logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e);
}
}
//
// we are packaging hibernate mapping files along with the class files,
// make sure class loader use the same class path when initializing hibernate mapping.
// This is important when we are deploying and testing at different environment (Tomcat/JUnit test runner)
//
if(file != null && dbProp != null){
Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader());
cfg.configure(file);
dbHost = dbProp.getProperty( "db.cloud.host" );
dbName = dbProp.getProperty( "db.cloud.name" );
dbUser = dbProp.getProperty( "db.cloud.username" );
dbPassword = dbProp.getProperty( "db.cloud.password" );
dbPort = dbProp.getProperty( "db.cloud.port" );
cfg.setProperty("hibernate.connection.url", "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName);
cfg.setProperty("hibernate.connection.username", dbUser);
cfg.setProperty("hibernate.connection.password", dbPassword);
factory = cfg.buildSessionFactory();
}else{
logger.warn("Unable to open load db configuration");
throw new RuntimeException("nable to open load db configuration");
}
}
public synchronized static CloudStackSessionFactory getInstance() {
if(instance == null) {
instance = new CloudStackSessionFactory();
}
return instance;
}
public Session openSession() {
return factory.openSession();
}
}


@ -1,85 +0,0 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.bridge.util;
import java.io.Serializable;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Calendar;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.hibernate.Query;
public class QueryHelper {
public static void bindParameters(Query query, Object[] params) {
int pos = 0;
if(params != null && params.length > 0) {
for(Object param : params) {
if(param instanceof Byte)
query.setByte(pos++, ((Byte)param).byteValue());
else if(param instanceof Short)
query.setShort(pos++, ((Short)param).shortValue());
else if(param instanceof Integer)
query.setInteger(pos++, ((Integer)param).intValue());
else if(param instanceof Long)
query.setLong(pos++, ((Long)param).longValue());
else if(param instanceof Float)
query.setFloat(pos++, ((Float)param).floatValue());
else if(param instanceof Double)
query.setDouble(pos++, ((Double)param).doubleValue());
else if(param instanceof Boolean)
query.setBoolean(pos++, ((Boolean)param).booleanValue());
else if(param instanceof Character)
query.setCharacter(pos++, ((Character)param).charValue());
else if(param instanceof Date)
query.setDate(pos++, (Date)param);
else if(param instanceof Calendar)
query.setCalendar(pos++, (Calendar)param);
else if(param instanceof CalendarDateParam)
query.setCalendarDate(pos++, ((CalendarDateParam)param).dateValue());
else if(param instanceof TimestampParam)
query.setTimestamp(pos++, ((TimestampParam)param).timestampValue());
else if(param instanceof TimeParam)
query.setTime(pos++, ((TimeParam)param).timeValue());
else if(param instanceof String)
query.setString(pos++, (String)param);
else if(param instanceof TextParam)
query.setText(pos++, ((TextParam)param).textValue());
else if(param instanceof byte[])
query.setBinary(pos++, (byte[])param);
else if(param instanceof BigDecimal)
query.setBigDecimal(pos++, (BigDecimal)param);
else if(param instanceof BigInteger)
query.setBigInteger(pos++, (BigInteger)param);
else if(param instanceof Locale)
query.setLocale(pos++, (Locale)param);
else if(param instanceof EntityParam)
query.setEntity(pos++, ((EntityParam)param).entityValue());
else if(param instanceof Serializable)
query.setSerializable(pos++, (Serializable)param);
else
query.setEntity(pos++, param);
}
}
}
public static <T> List<T> executeQuery(Query query) {
return (List<T>)query.list();
}
}
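
A minimal usage sketch (illustrative, not part of this commit) of binding positional HQL parameters through QueryHelper and running the query; the entity name and parameter values are made-up placeholders.

import java.util.List;
import org.hibernate.Query;
import org.hibernate.Session;
import com.cloud.bridge.util.QueryHelper;

public class QueryHelperUsageExample {
    public List<Object> findRows(Session session) {
        // positional ("?") HQL parameters are bound in declaration order by bindParameters()
        Query query = session.createQuery("from SomeMappedEntity where owner = ? and state = ?"); // hypothetical entity
        QueryHelper.bindParameters(query, new Object[] { "account-1", Integer.valueOf(1) });
        List<Object> rows = QueryHelper.executeQuery(query);
        return rows;
    }
}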


@ -1,34 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.stack.models.CloudStackAccount" table="account" lazy="true">
<id name="id" type="string" column="uuid" >
</id>
<property name="name">
<column name="account_name" />
</property>
<property name="defaultZoneId">
<column name="default_zone_id" />
</property>
</class>
</hibernate-mapping>
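
A minimal usage sketch (illustrative, not part of this commit): with the mapping above, an account row can be loaded by its uuid primary key via Session.get(); the example class is a hypothetical placeholder.

import org.hibernate.Session;
import com.cloud.stack.models.CloudStackAccount;

public class AccountLookupExample {
    public CloudStackAccount findByUuid(Session session, String accountUuid) {
        // the mapping keys the account table on its uuid column, so get() looks the
        // row up by uuid; null is returned if no row matches
        return (CloudStackAccount) session.get(CloudStackAccount.class, accountUuid);
    }
}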


@ -1,37 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.stack.models.CloudStackConfiguration" table="configuration" lazy="true">
<id name="name" type="string" column="name" >
</id>
<property name="category">
<column name="category" />
</property>
<property name="value">
<column name="value" />
</property>
<property name="description">
<column name="description" />
</property>
</class>
</hibernate-mapping>


@ -1,34 +0,0 @@
<?xml version="1.0"?>
<!--
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-->
<!DOCTYPE hibernate-mapping PUBLIC "-//Hibernate/Hibernate Mapping DTD 3.0//EN" "http://hibernate.sourceforge.net/hibernate-mapping-3.0.dtd">
<hibernate-mapping>
<class name="com.cloud.stack.models.CloudStackServiceOffering" table="disk_offering" lazy="true">
<id name="id" type="string" column="uuid" >
</id>
<property name="name">
<column name="name" />
</property>
<property name="domainId">
<column name="domain_id" />
</property>
</class>
</hibernate-mapping>


@ -67,6 +67,8 @@ public class CloudStackUserVm {
private String jobId;
@SerializedName(ApiConstants.JOB_STATUS)
private Integer jobStatus;
@SerializedName(ApiConstants.SSH_KEYPAIR)
private String keyPairName;
@SerializedName(ApiConstants.MEMORY)
private Integer memory;
@SerializedName(ApiConstants.NAME)
@ -340,6 +342,13 @@ public class CloudStackUserVm {
return serviceOfferingName;
}
/**
* @return the keyPairName
*/
public String getKeyPairName() {
return keyPairName;
}
/**
* @return the state
*/

Some files were not shown because too many files have changed in this diff.