From 9064236879dc9f2f11538d891271545e1ff10e9c Mon Sep 17 00:00:00 2001
From: Wido den Hollander
Date: Fri, 31 Aug 2012 19:57:51 +0200
Subject: [PATCH 1/2] debian: Depend on the MySQL Java connector

Ubuntu and Debian provide the JDBC MySQL connector; we depend on this for
running the client.
---
 debian/cloud-deps.install | 1 -
 debian/control            | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/debian/cloud-deps.install b/debian/cloud-deps.install
index 53e4f4a2583..e6ddde5bacb 100644
--- a/debian/cloud-deps.install
+++ b/debian/cloud-deps.install
@@ -27,7 +27,6 @@
 /usr/share/java/cloud-log4j.jar
 /usr/share/java/cloud-trilead-ssh2-build213.jar
 /usr/share/java/cloud-cglib.jar
-/usr/share/java/cloud-mysql-connector-java-5.1.7-bin.jar
 /usr/share/java/cloud-xenserver-5.6.100-1.jar
 /usr/share/java/cloud-xmlrpc-common-3.*.jar
 /usr/share/java/cloud-xmlrpc-client-3.*.jar
diff --git a/debian/control b/debian/control
index 1bc1848cf01..e730becef4a 100644
--- a/debian/control
+++ b/debian/control
@@ -86,7 +86,7 @@ Provides: vmops-client
 Conflicts: vmops-client
 Replaces: vmops-client
 Architecture: any
-Depends: openjdk-6-jre, cloud-deps (= ${source:Version}), cloud-utils (= ${source:Version}), cloud-server (= ${source:Version}), cloud-client-ui (= ${source:Version}), cloud-setup (= ${source:Version}), cloud-python (= ${source:Version}), tomcat6, libws-commons-util-java, libcommons-dbcp-java, libcommons-collections-java, libcommons-httpclient-java, sysvinit-utils, chkconfig, sudo, jsvc, python-mysqldb, python-paramiko, augeas-tools, genisoimage, cloud-system-iso
+Depends: openjdk-6-jre, cloud-deps (= ${source:Version}), cloud-utils (= ${source:Version}), cloud-server (= ${source:Version}), cloud-client-ui (= ${source:Version}), cloud-setup (= ${source:Version}), cloud-python (= ${source:Version}), tomcat6, libws-commons-util-java, libcommons-dbcp-java, libcommons-collections-java, libcommons-httpclient-java, sysvinit-utils, chkconfig, sudo, jsvc, python-mysqldb, python-paramiko, augeas-tools, genisoimage, cloud-system-iso, libmysql-java (>= 5.1)
 Description: CloudStack client
  The CloudStack management server is the central point of coordination, management, and intelligence in the CloudStack Cloud Stack.
 This package
From 39aa7d86affccf42b2e31222fc7ad295ce7180f4 Mon Sep 17 00:00:00 2001
From: Rajesh Battala
Date: Fri, 31 Aug 2012 11:44:07 -0700
Subject: [PATCH 2/2] Moved Awsapi (EC2/S3) from Hibernate framework to
 CloudStack Generic Dao Framework

Created/modified new VOs and DAO Impl classes to use the Generic Dao
Framework.
---
 .../setup/cloudstack-aws-api-register | 2 +-
 awsapi/.classpath | 23 +-
 awsapi/conf/hibernate.cfg.xml | 51 -
 .../auth/ec2/AuthenticationHandler.java | 21 +-
 .../bridge/auth/s3/AuthenticationHandler.java | 10 +-
 .../lifecycle/ServiceEngineLifecycle.java | 4 +-
 .../cloud/bridge/model/BucketPolicyVO.java | 59 ++
 .../bridge/model/CloudStackAccountVO.java | 34 +
 .../model/CloudStackConfigurationVO.java | 30 +
 .../model/CloudStackServiceOfferingVO.java | 51 +
 .../src/com/cloud/bridge/model/MHost.hbm.xml | 55 --
 .../com/cloud/bridge/model/MHostMount.hbm.xml | 46 -
 .../{MHostMount.java => MHostMountVO.java} | 68 +-
 .../bridge/model/{MHost.java => MHostVO.java} | 50 +-
 .../cloud/bridge/model/MultiPartPartsVO.java | 108 +++
 .../bridge/model/MultiPartUploadsVO.java | 94 ++
 .../cloud/bridge/model/MultipartMetaVO.java | 59 ++
 .../cloud/bridge/model/OfferingBundleVO.java | 46 +
 .../src/com/cloud/bridge/model/SAcl.hbm.xml | 54 --
 awsapi/src/com/cloud/bridge/model/SAcl.java | 106 +--
 awsapi/src/com/cloud/bridge/model/SAclVO.java | 254 +++++
 .../com/cloud/bridge/model/SBucket.hbm.xml | 56 --
 .../src/com/cloud/bridge/model/SBucket.java | 11 +-
 .../src/com/cloud/bridge/model/SBucketVO.java | 169 ++++
 .../src/com/cloud/bridge/model/SHost.hbm.xml | 60 --
 awsapi/src/com/cloud/bridge/model/SHost.java | 18 +-
 .../src/com/cloud/bridge/model/SHostVO.java | 152 +++
 .../src/com/cloud/bridge/model/SMeta.hbm.xml | 41 -
 .../bridge/model/{SMeta.java => SMetaVO.java} | 32 +-
 .../com/cloud/bridge/model/SObject.hbm.xml | 59 --
 .../cloud/bridge/model/SObjectItem.hbm.xml | 62 --
 .../{SObjectItem.java => SObjectItemVO.java} | 64 +-
 .../model/{SObject.java => SObjectVO.java} | 80 +-
 .../bridge/model/UserCredentials.hbm.xml | 38 -
 ...redentials.java => UserCredentialsVO.java} | 36 +-
 .../com/cloud/bridge/persist/EntityDao.java | 118 ---
 .../bridge/persist/GMTDateTimeUserType.java | 102 --
 .../cloud/bridge/persist/PersistContext.java | 359 --------
 .../bridge/persist/PersistException.java | 36 -
 .../bridge/persist/dao/BucketPolicyDao.java | 157 +---
 .../persist/dao/BucketPolicyDaoImpl.java | 72 ++
 .../persist/dao/CloudStackAccountDao.java | 36 +-
 .../persist/dao/CloudStackAccountDaoImpl.java | 39 +
 .../dao/CloudStackConfigurationDao.java | 41 +-
 .../dao/CloudStackConfigurationDaoImpl.java | 45 +
 .../persist/dao/CloudStackSvcOfferingDao.java | 39 +-
 .../dao/CloudStackSvcOfferingDaoImpl.java | 75 ++
 .../cloud/bridge/persist/dao/MHostDao.java | 34 +-
 .../bridge/persist/dao/MHostDaoImpl.java | 61 ++
 .../bridge/persist/dao/MHostMountDao.java | 33 +-
 .../bridge/persist/dao/MHostMountDaoImpl.java | 48 +
 .../bridge/persist/dao/MultiPartPartsDao.java | 18 +
 .../persist/dao/MultiPartPartsDaoImpl.java | 101 ++
 .../persist/dao/MultiPartUploadsDao.java | 21 +
 .../persist/dao/MultiPartUploadsDaoImpl.java | 108 +++
 .../bridge/persist/dao/MultipartLoadDao.java | 430 +++------
 .../bridge/persist/dao/MultipartMetaDao.java | 12 +
 .../persist/dao/MultipartMetaDaoImpl.java | 34 +
 .../cloud/bridge/persist/dao/OfferingDao.java | 167 +---
 .../bridge/persist/dao/OfferingDaoImpl.java | 135 +++
 .../com/cloud/bridge/persist/dao/SAclDao.java | 75 +-
 .../cloud/bridge/persist/dao/SAclDaoImpl.java | 127 +++
.../cloud/bridge/persist/dao/SBucketDao.java | 37 +- .../bridge/persist/dao/SBucketDaoImpl.java | 72 ++ .../cloud/bridge/persist/dao/SHostDao.java | 39 +- .../bridge/persist/dao/SHostDaoImpl.java | 67 ++ .../cloud/bridge/persist/dao/SMetaDao.java | 54 +- .../bridge/persist/dao/SMetaDaoImpl.java | 88 ++ .../cloud/bridge/persist/dao/SObjectDao.java | 77 +- .../bridge/persist/dao/SObjectDaoImpl.java | 119 +++ .../bridge/persist/dao/SObjectItemDao.java | 36 +- .../persist/dao/SObjectItemDaoImpl.java | 71 ++ .../persist/dao/UserCredentialsDao.java | 167 +--- .../persist/dao/UserCredentialsDaoImpl.java | 73 ++ .../cloud/bridge/service/EC2MainServlet.java | 20 +- .../cloud/bridge/service/EC2RestServlet.java | 69 +- .../cloud/bridge/service/S3RestServlet.java | 49 +- .../service/controller/s3/S3BucketAction.java | 870 +++++++++--------- .../service/controller/s3/S3ObjectAction.java | 18 +- .../controller/s3/ServiceProvider.java | 151 +-- .../bridge/service/core/ec2/EC2Engine.java | 35 +- .../bridge/service/core/s3/S3Engine.java | 808 ++++++++-------- .../cloud/bridge/service/core/s3/S3Grant.java | 5 +- .../bridge/util/CloudSessionFactory.java | 106 --- .../bridge/util/CloudStackSessionFactory.java | 106 --- .../com/cloud/bridge/util/QueryHelper.java | 85 -- .../stack/models/CloudStackAccount.hbm.xml | 34 - .../models/CloudStackConfiguration.hbm.xml | 37 - .../models/CloudStackServiceOffering.hbm.xml | 34 - build/build-aws-api.xml | 1 + client/tomcatconf/components.xml.in | 29 +- deps/awsapi-lib/cloud-cglib.jar | 0 deps/awsapi-lib/cloud-commons-dbcp-1.4.jar | 0 deps/awsapi-lib/cloud-commons-pool-1.5.6.jar | 0 deps/awsapi-lib/cloud-ehcache.jar | 0 .../cloud-javax.persistence-2.0.0.jar | 0 deps/awsapi-lib/cloud-utils.jar | 0 utils/src/com/cloud/utils/db/Transaction.java | 38 +- 98 files changed, 4118 insertions(+), 3903 deletions(-) delete mode 100644 awsapi/conf/hibernate.cfg.xml create mode 100644 awsapi/src/com/cloud/bridge/model/BucketPolicyVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/CloudStackAccountVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/CloudStackConfigurationVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/CloudStackServiceOfferingVO.java delete mode 100644 awsapi/src/com/cloud/bridge/model/MHost.hbm.xml delete mode 100644 awsapi/src/com/cloud/bridge/model/MHostMount.hbm.xml rename awsapi/src/com/cloud/bridge/model/{MHostMount.java => MHostMountVO.java} (56%) rename awsapi/src/com/cloud/bridge/model/{MHost.java => MHostVO.java} (65%) create mode 100644 awsapi/src/com/cloud/bridge/model/MultiPartPartsVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/MultiPartUploadsVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/MultipartMetaVO.java create mode 100644 awsapi/src/com/cloud/bridge/model/OfferingBundleVO.java delete mode 100644 awsapi/src/com/cloud/bridge/model/SAcl.hbm.xml create mode 100644 awsapi/src/com/cloud/bridge/model/SAclVO.java delete mode 100644 awsapi/src/com/cloud/bridge/model/SBucket.hbm.xml create mode 100644 awsapi/src/com/cloud/bridge/model/SBucketVO.java delete mode 100644 awsapi/src/com/cloud/bridge/model/SHost.hbm.xml create mode 100644 awsapi/src/com/cloud/bridge/model/SHostVO.java delete mode 100644 awsapi/src/com/cloud/bridge/model/SMeta.hbm.xml rename awsapi/src/com/cloud/bridge/model/{SMeta.java => SMetaVO.java} (74%) delete mode 100644 awsapi/src/com/cloud/bridge/model/SObject.hbm.xml delete mode 100644 awsapi/src/com/cloud/bridge/model/SObjectItem.hbm.xml rename 
awsapi/src/com/cloud/bridge/model/{SObjectItem.java => SObjectItemVO.java} (67%) rename awsapi/src/com/cloud/bridge/model/{SObject.java => SObjectVO.java} (68%) delete mode 100644 awsapi/src/com/cloud/bridge/model/UserCredentials.hbm.xml rename awsapi/src/com/cloud/bridge/model/{UserCredentials.java => UserCredentialsVO.java} (72%) delete mode 100644 awsapi/src/com/cloud/bridge/persist/EntityDao.java delete mode 100644 awsapi/src/com/cloud/bridge/persist/GMTDateTimeUserType.java delete mode 100644 awsapi/src/com/cloud/bridge/persist/PersistContext.java delete mode 100644 awsapi/src/com/cloud/bridge/persist/PersistException.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MHostDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MHostMountDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDao.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDao.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDao.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/OfferingDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SAclDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SBucketDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SHostDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SMetaDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SObjectDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDaoImpl.java create mode 100644 awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDaoImpl.java delete mode 100644 awsapi/src/com/cloud/bridge/util/CloudSessionFactory.java delete mode 100644 awsapi/src/com/cloud/bridge/util/CloudStackSessionFactory.java delete mode 100644 awsapi/src/com/cloud/bridge/util/QueryHelper.java delete mode 100644 awsapi/src/com/cloud/stack/models/CloudStackAccount.hbm.xml delete mode 100644 awsapi/src/com/cloud/stack/models/CloudStackConfiguration.hbm.xml delete mode 100644 awsapi/src/com/cloud/stack/models/CloudStackServiceOffering.hbm.xml create mode 100644 deps/awsapi-lib/cloud-cglib.jar create mode 100644 deps/awsapi-lib/cloud-commons-dbcp-1.4.jar create mode 100644 deps/awsapi-lib/cloud-commons-pool-1.5.6.jar create mode 100644 deps/awsapi-lib/cloud-ehcache.jar create mode 100644 deps/awsapi-lib/cloud-javax.persistence-2.0.0.jar create mode 100644 deps/awsapi-lib/cloud-utils.jar diff --git a/awsapi-setup/setup/cloudstack-aws-api-register b/awsapi-setup/setup/cloudstack-aws-api-register index 19b53fd26c5..ea57d0b9dcf 100644 --- a/awsapi-setup/setup/cloudstack-aws-api-register +++ b/awsapi-setup/setup/cloudstack-aws-api-register @@ -1,4 +1,4 @@ -#!/cygdrive/c/python26/python +#!/usr/bin/python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. 
See the NOTICE file diff --git a/awsapi/.classpath b/awsapi/.classpath index 4dc46c4b1ec..c5e598f9f43 100644 --- a/awsapi/.classpath +++ b/awsapi/.classpath @@ -1,25 +1,6 @@ - - @@ -44,7 +25,6 @@ under the License. - @@ -81,5 +61,8 @@ under the License. + + + diff --git a/awsapi/conf/hibernate.cfg.xml b/awsapi/conf/hibernate.cfg.xml deleted file mode 100644 index d484849fd07..00000000000 --- a/awsapi/conf/hibernate.cfg.xml +++ /dev/null @@ -1,51 +0,0 @@ - - - - - - - com.mysql.jdbc.Driver - 20 - false - - - 2 - - true - org.hibernate.dialect.MySQLDialect - - false - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/auth/ec2/AuthenticationHandler.java b/awsapi/src/com/cloud/bridge/auth/ec2/AuthenticationHandler.java index 3b28c001b7d..f79feaad5bc 100644 --- a/awsapi/src/com/cloud/bridge/auth/ec2/AuthenticationHandler.java +++ b/awsapi/src/com/cloud/bridge/auth/ec2/AuthenticationHandler.java @@ -37,15 +37,16 @@ import java.io.InputStream; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; -import com.cloud.bridge.model.UserCredentials; -import com.cloud.bridge.persist.dao.UserCredentialsDao; +import com.cloud.bridge.model.UserCredentialsVO; +import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl; import com.cloud.bridge.service.UserContext; import com.cloud.bridge.util.AuthenticationUtils; +import com.cloud.utils.component.ComponentLocator; public class AuthenticationHandler implements Handler { protected final static Logger logger = Logger.getLogger(AuthenticationHandler.class); - + protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class); private DocumentBuilderFactory dbf = null; protected HandlerDescription handlerDesc = new HandlerDescription( "EC2AuthenticationHandler" ); @@ -111,13 +112,15 @@ public class AuthenticationHandler implements Handler { logger.debug( "X509 cert's uniqueId: " + uniqueId ); // -> find the Cloud API key and the secret key from the cert's uniqueId - UserCredentialsDao credentialDao = new UserCredentialsDao(); +/* UserCredentialsDao credentialDao = new UserCredentialsDao(); UserCredentials cloudKeys = credentialDao.getByCertUniqueId( uniqueId ); - if ( null == cloudKeys ) { - logger.error( "Cert does not map to Cloud API keys: " + uniqueId ); - throw new AxisFault( "User not properly registered: Certificate does not map to Cloud API Keys", "Client.Blocked" ); - } - else UserContext.current().initContext( cloudKeys.getAccessKey(), cloudKeys.getSecretKey(), cloudKeys.getAccessKey(), "SOAP Request", null ); +*/ + UserCredentialsVO cloudKeys = ucDao.getByCertUniqueId(uniqueId); + if ( null == cloudKeys ) { + logger.error( "Cert does not map to Cloud API keys: " + uniqueId ); + throw new AxisFault( "User not properly registered: Certificate does not map to Cloud API Keys", "Client.Blocked" ); + } + else UserContext.current().initContext( cloudKeys.getAccessKey(), cloudKeys.getSecretKey(), cloudKeys.getAccessKey(), "SOAP Request", null ); //System.out.println( "end of cert match: " + UserContext.current().getSecretKey()); } } diff --git a/awsapi/src/com/cloud/bridge/auth/s3/AuthenticationHandler.java b/awsapi/src/com/cloud/bridge/auth/s3/AuthenticationHandler.java index f7a1a7e8c6e..b9519169632 100644 --- a/awsapi/src/com/cloud/bridge/auth/s3/AuthenticationHandler.java +++ b/awsapi/src/com/cloud/bridge/auth/s3/AuthenticationHandler.java @@ -29,10 +29,11 @@ import org.apache.axis2.AxisFault; import org.apache.axis2.description.HandlerDescription; import 
org.apache.axis2.description.Parameter; -import com.cloud.bridge.model.UserCredentials; -import com.cloud.bridge.persist.dao.UserCredentialsDao; +import com.cloud.bridge.model.UserCredentialsVO; +import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl; import com.cloud.bridge.service.UserContext; import com.cloud.bridge.util.S3SoapAuth; +import com.cloud.utils.component.ComponentLocator; /* * For SOAP compatibility. @@ -40,7 +41,7 @@ import com.cloud.bridge.util.S3SoapAuth; public class AuthenticationHandler implements Handler { protected final static Logger logger = Logger.getLogger(AuthenticationHandler.class); - + protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class); protected HandlerDescription handlerDesc = new HandlerDescription( "default handler" ); private String name = "S3AuthenticationHandler"; @@ -190,8 +191,7 @@ public class AuthenticationHandler implements Handler { private String lookupSecretKey( String accessKey ) throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - UserCredentialsDao credentialDao = new UserCredentialsDao(); - UserCredentials cloudKeys = credentialDao.getByAccessKey( accessKey ); + UserCredentialsVO cloudKeys = ucDao.getByAccessKey( accessKey ); if ( null == cloudKeys ) { logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" ); return null; diff --git a/awsapi/src/com/cloud/bridge/lifecycle/ServiceEngineLifecycle.java b/awsapi/src/com/cloud/bridge/lifecycle/ServiceEngineLifecycle.java index 88c8729d902..73cb801cec4 100644 --- a/awsapi/src/com/cloud/bridge/lifecycle/ServiceEngineLifecycle.java +++ b/awsapi/src/com/cloud/bridge/lifecycle/ServiceEngineLifecycle.java @@ -21,8 +21,8 @@ import org.apache.axis2.description.AxisService; import org.apache.axis2.engine.ServiceLifeCycle; import org.apache.log4j.Logger; -import com.cloud.bridge.persist.dao.UserCredentialsDao; import com.cloud.bridge.service.controller.s3.ServiceProvider; +import com.cloud.utils.db.Transaction; /** @@ -38,7 +38,7 @@ public class ServiceEngineLifecycle implements ServiceLifeCycle { public void startUp(ConfigurationContext config, AxisService service) { // initialize service provider during Axis engine startup try{ - UserCredentialsDao.preCheckTableExistence(); + //UserCredentialsDao.preCheckTableExistence(); ServiceProvider.getInstance(); ServiceEngineLifecycle.initialized = true; }catch(Exception e){ diff --git a/awsapi/src/com/cloud/bridge/model/BucketPolicyVO.java b/awsapi/src/com/cloud/bridge/model/BucketPolicyVO.java new file mode 100644 index 00000000000..c4be142d5c7 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/BucketPolicyVO.java @@ -0,0 +1,59 @@ +package com.cloud.bridge.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name="bucket_policies") +public class BucketPolicyVO { + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private long id; + + @Column(name="BucketName") + private String bucketName; + + @Column(name="OwnerCanonicalID") + private String ownerCanonicalID; + + @Column(name="Policy") + private String policy; + + public BucketPolicyVO() { } + public BucketPolicyVO(String bucketName, String client, String policy) { + this.bucketName = bucketName; + this.ownerCanonicalID = client; + this.policy = policy; + } + + public 
long getId() { + return id; + } + public void setId(long id) { + this.id = id; + } + public String getBucketName() { + return bucketName; + } + public void setBucketName(String bucketName) { + this.bucketName = bucketName; + } + public String getOwnerCanonicalID() { + return ownerCanonicalID; + } + public void setOwnerCanonicalID(String ownerCanonicalID) { + this.ownerCanonicalID = ownerCanonicalID; + } + public String getPolicy() { + return policy; + } + public void setPolicy(String policy) { + this.policy = policy; + } +} diff --git a/awsapi/src/com/cloud/bridge/model/CloudStackAccountVO.java b/awsapi/src/com/cloud/bridge/model/CloudStackAccountVO.java new file mode 100644 index 00000000000..5737abb9e79 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/CloudStackAccountVO.java @@ -0,0 +1,34 @@ +package com.cloud.bridge.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Table; + +@Entity +@Table(name="account") +public class CloudStackAccountVO { + + @Column(name="uuid") + private String uuid; + + @Column(name="default_zone_id") + private Long defaultZoneId = null; + + public String getUuid() { + return uuid; + } + + public void setUuid(String uuid) { + this.uuid = uuid; + } + + public Long getDefaultZoneId() { + return defaultZoneId; + } + + public void setDefaultZoneId(Long defaultZoneId) { + this.defaultZoneId = defaultZoneId; + } + + +} diff --git a/awsapi/src/com/cloud/bridge/model/CloudStackConfigurationVO.java b/awsapi/src/com/cloud/bridge/model/CloudStackConfigurationVO.java new file mode 100644 index 00000000000..982969f73dd --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/CloudStackConfigurationVO.java @@ -0,0 +1,30 @@ +package com.cloud.bridge.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.Id; +import javax.persistence.Table; + +import com.cloud.utils.db.DB; + +@Entity +@Table(name="configuration") +public class CloudStackConfigurationVO { + @Id + @Column(name="name") + private String name; + + @Column(name="value", length=4095) + private String value; + + @DB + public String getValue() { + return value; + } + + public String getName() { + return name; + } + + +} diff --git a/awsapi/src/com/cloud/bridge/model/CloudStackServiceOfferingVO.java b/awsapi/src/com/cloud/bridge/model/CloudStackServiceOfferingVO.java new file mode 100644 index 00000000000..23e0cc850d2 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/CloudStackServiceOfferingVO.java @@ -0,0 +1,51 @@ +package com.cloud.bridge.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name="disk_offering") +public class CloudStackServiceOfferingVO { + + @Id + @Column(name="id") + private String id; + + @Column(name="name") + private String name; + + @Column(name="domain_id") + private String domainId; + + + public String getId() { + return id; + } + + + public String getName() { + return name; + } + + + public void setName(String name) { + this.name = name; + } + + + public String getDomainId() { + return domainId; + } + + + public void setDomainId(String domainId) { + this.domainId = domainId; + } + + + +} diff --git a/awsapi/src/com/cloud/bridge/model/MHost.hbm.xml b/awsapi/src/com/cloud/bridge/model/MHost.hbm.xml deleted file mode 100644 index e151171ee9a..00000000000 --- 
a/awsapi/src/com/cloud/bridge/model/MHost.hbm.xml +++ /dev/null @@ -1,55 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/MHostMount.hbm.xml b/awsapi/src/com/cloud/bridge/model/MHostMount.hbm.xml deleted file mode 100644 index c186de56cc4..00000000000 --- a/awsapi/src/com/cloud/bridge/model/MHostMount.hbm.xml +++ /dev/null @@ -1,46 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/MHostMount.java b/awsapi/src/com/cloud/bridge/model/MHostMountVO.java similarity index 56% rename from awsapi/src/com/cloud/bridge/model/MHostMount.java rename to awsapi/src/com/cloud/bridge/model/MHostMountVO.java index 771ed8b6aab..a008658a1be 100644 --- a/awsapi/src/com/cloud/bridge/model/MHostMount.java +++ b/awsapi/src/com/cloud/bridge/model/MHostMountVO.java @@ -19,18 +19,46 @@ package com.cloud.bridge.model; import java.io.Serializable; import java.util.Date; -public class MHostMount implements Serializable { - private static final long serialVersionUID = -1119494563131099642L; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Transient; +@Entity +@Table(name="mhost_mount") +public class MHostMountVO implements Serializable { + private static final long serialVersionUID = -1119494563131099642L; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; - private MHost mhost; - private SHost shost; + @Column(name="MHostID") + private long mHostID; + @Column(name="SHostID") + private long sHostID; + + @Transient + private MHostVO mhost; + + @Transient + private SHostVO shost; + + @Column(name="MountPath") private String mountPath; + + @Column(name="LastMountTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date lastMountTime; - public MHostMount() { + public MHostMountVO() { } public Long getId() { @@ -41,19 +69,35 @@ public class MHostMount implements Serializable { this.id = id; } - public MHost getMhost() { + public long getmHostID() { + return mHostID; + } + + public void setmHostID(long mHostID) { + this.mHostID = mHostID; + } + + public long getsHostID() { + return sHostID; + } + + public void setsHostID(long sHostID) { + this.sHostID = sHostID; + } + + public MHostVO getMhost() { return mhost; } - public void setMhost(MHost mhost) { + public void setMhost(MHostVO mhost) { this.mhost = mhost; } - public SHost getShost() { + public SHostVO getShost() { return shost; } - public void setShost(SHost shost) { + public void setShost(SHostVO shost) { this.shost = shost; } @@ -78,11 +122,11 @@ public class MHostMount implements Serializable { if(this == other) return true; - if(!(other instanceof MHostMount)) + if(!(other instanceof MHostMountVO)) return false; - return getMhost().equals(((MHostMount)other).getMhost()) && - getShost().equals(((MHostMount)other).getShost()); + return getMhost().equals(((MHostMountVO)other).getMhost()) && + getShost().equals(((MHostMountVO)other).getShost()); } @Override diff --git a/awsapi/src/com/cloud/bridge/model/MHost.java b/awsapi/src/com/cloud/bridge/model/MHostVO.java similarity index 65% rename from awsapi/src/com/cloud/bridge/model/MHost.java rename to awsapi/src/com/cloud/bridge/model/MHostVO.java index 2187c7ee546..0be74b7fe90 
100644 --- a/awsapi/src/com/cloud/bridge/model/MHost.java +++ b/awsapi/src/com/cloud/bridge/model/MHostVO.java @@ -21,20 +21,46 @@ import java.util.Date; import java.util.HashSet; import java.util.Set; -public class MHost implements Serializable { - private static final long serialVersionUID = 4848254624679753930L; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Transient; +@Entity +@Table(name="mhost") +public class MHostVO implements Serializable { + private static final long serialVersionUID = 4848254624679753930L; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; + @Column(name="MHostKey", nullable=false) private String hostKey; + + @Column(name="Host") private String host; + + @Column(name="Version") private String version; + + @Column(name="LastHeartbeatTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date lastHeartbeatTime; + + @Transient + private Set localSHosts = new HashSet(); + + @Transient + private Set mounts = new HashSet(); - private Set localSHosts = new HashSet(); - private Set mounts = new HashSet(); - - public MHost() { + public MHostVO() { } public Long getId() { @@ -77,19 +103,19 @@ public class MHost implements Serializable { this.lastHeartbeatTime = lastHeartbeatTime; } - public Set getLocalSHosts() { + public Set getLocalSHosts() { return localSHosts; } - public void setLocalSHosts(Set localSHosts) { + public void setLocalSHosts(Set localSHosts) { this.localSHosts = localSHosts; } - public Set getMounts() { + public Set getMounts() { return mounts; } - public void setMounts(Set mounts) { + public void setMounts(Set mounts) { this.mounts = mounts; } @@ -98,10 +124,10 @@ public class MHost implements Serializable { if(this == other) return true; - if(!(other instanceof MHost)) + if(!(other instanceof MHostVO)) return false; - return hostKey == ((MHost)other).getHostKey(); + return hostKey == ((MHostVO)other).getHostKey(); } @Override diff --git a/awsapi/src/com/cloud/bridge/model/MultiPartPartsVO.java b/awsapi/src/com/cloud/bridge/model/MultiPartPartsVO.java new file mode 100644 index 00000000000..d622e47a691 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/MultiPartPartsVO.java @@ -0,0 +1,108 @@ +package com.cloud.bridge.model; + +import java.util.Date; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +@Entity +@Table(name="multipart_parts") +public class MultiPartPartsVO { + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + + @Column(name="UploadID") + private Long uploadid; + + @Column(name="partNumber") + private int partNumber; + + @Column(name="MD5") + private String md5; + + @Column(name="StoredPath") + private String storedPath; + + @Column(name="StoredSize") + private Long storedSize; + + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) + private Date createTime; + + public MultiPartPartsVO() { } + + public MultiPartPartsVO(int uploadId, int partNumber, String md5, + String storedPath, int size, Date date) { + this.uploadid 
= new Long(uploadId); + this.partNumber = partNumber; + this.md5 = md5; + this.storedPath = storedPath; + this.storedSize = new Long(size); + this.createTime = date; + } + + public Long getUploadid() { + return uploadid; + } + + public void setUploadid(Long uploadid) { + this.uploadid = uploadid; + } + + public int getPartNumber() { + return partNumber; + } + + public void setPartNumber(int partNumber) { + this.partNumber = partNumber; + } + + public String getMd5() { + return md5; + } + + public void setMd5(String md5) { + this.md5 = md5; + } + + public String getStoredPath() { + return storedPath; + } + + public void setStoredPath(String storedPath) { + this.storedPath = storedPath; + } + + public Long getStoredSize() { + return storedSize; + } + + public void setStoredSize(Long storedSize) { + this.storedSize = storedSize; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Long getId() { + return id; + } + + + +} diff --git a/awsapi/src/com/cloud/bridge/model/MultiPartUploadsVO.java b/awsapi/src/com/cloud/bridge/model/MultiPartUploadsVO.java new file mode 100644 index 00000000000..fd0bb42f4c2 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/MultiPartUploadsVO.java @@ -0,0 +1,94 @@ +package com.cloud.bridge.model; + +import java.util.Date; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +@Entity +@Table(name="multipart_uploads") + +public class MultiPartUploadsVO { + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + + @Column(name="AccessKey") + private String accessKey; + + @Column(name="BucketName") + private String bucketName; + + @Column(name="NameKey") + private String nameKey; + + @Column(name="x_amz_acl") + private String amzAcl; + + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) + private Date createTime; + + public MultiPartUploadsVO() {} + + public MultiPartUploadsVO(String accessKey, String bucketName, String key, String cannedAccess, Date tod) { + this.accessKey = accessKey; + this.bucketName = bucketName; + this.nameKey = key; + this.amzAcl = cannedAccess; + this.createTime = tod; + } + + public Long getId() { + return id; + } + + public String getAccessKey() { + return accessKey; + } + + public void setAccessKey(String accessKey) { + this.accessKey = accessKey; + } + + public String getBucketName() { + return bucketName; + } + + public void setBucketName(String bucketName) { + this.bucketName = bucketName; + } + + public String getNameKey() { + return nameKey; + } + + public void setNameKey(String nameKey) { + this.nameKey = nameKey; + } + + public String getAmzAcl() { + return amzAcl; + } + + public void setAmzAcl(String amzAcl) { + this.amzAcl = amzAcl; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + +} \ No newline at end of file diff --git a/awsapi/src/com/cloud/bridge/model/MultipartMetaVO.java b/awsapi/src/com/cloud/bridge/model/MultipartMetaVO.java new file mode 100644 index 00000000000..ca2b69ee64f --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/MultipartMetaVO.java @@ -0,0 +1,59 @@ +package com.cloud.bridge.model; + +import 
javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name="multipart_meta") + +public class MultipartMetaVO { + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + + @Column(name="UploadID") + private long uploadID; + + @Column(name="Name") + private String name; + + @Column(name="Value") + private String value; + + public long getID() { + return id; + } + + public long getUploadID() { + return uploadID; + } + + public void setUploadID(long uploadID) { + this.uploadID = uploadID; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getValue() { + return value; + } + + public void setValue(String value) { + this.value = value; + } + + + +} diff --git a/awsapi/src/com/cloud/bridge/model/OfferingBundleVO.java b/awsapi/src/com/cloud/bridge/model/OfferingBundleVO.java new file mode 100644 index 00000000000..4c41cff451a --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/OfferingBundleVO.java @@ -0,0 +1,46 @@ +package com.cloud.bridge.model; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + + +@Entity +@Table(name="offering_bundle") +public class OfferingBundleVO { + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private long id; + + @Column(name="AmazonEC2Offering") + private String amazonOffering; + + @Column(name="CloudStackOffering") + private String cloudstackOffering; + + public long getID() { + return id; + } + + public String getAmazonOffering() { + return amazonOffering; + } + + public void setAmazonOffering(String amazonOffering) { + this.amazonOffering = amazonOffering; + } + + public String getCloudstackOffering() { + return cloudstackOffering; + } + + public void setCloudstackOffering(String cloudstackOffering) { + this.cloudstackOffering = cloudstackOffering; + } + + +} diff --git a/awsapi/src/com/cloud/bridge/model/SAcl.hbm.xml b/awsapi/src/com/cloud/bridge/model/SAcl.hbm.xml deleted file mode 100644 index 175e1a7808d..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SAcl.hbm.xml +++ /dev/null @@ -1,54 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SAcl.java b/awsapi/src/com/cloud/bridge/model/SAcl.java index 7cbcabee316..af0bb5eee91 100644 --- a/awsapi/src/com/cloud/bridge/model/SAcl.java +++ b/awsapi/src/com/cloud/bridge/model/SAcl.java @@ -37,8 +37,7 @@ import com.cloud.bridge.util.Triple; * < permission1, permission2, symbol > * when given an aclRequestString, a target (i.e. bucket or object) and the ID of the owner. 
*/ -public class SAcl implements Serializable { - private static final long serialVersionUID = 7900837117165018850L; +public interface SAcl { public static final int GRANTEE_USER = 0; public static final int GRANTEE_ALLUSERS = 1; @@ -52,95 +51,6 @@ public class SAcl implements Serializable { public static final int PERMISSION_WRITE_ACL = 8; public static final int PERMISSION_FULL = (PERMISSION_READ | PERMISSION_WRITE | PERMISSION_READ_ACL | PERMISSION_WRITE_ACL); - private Long id; - - private String target; - private long targetId; - - private int granteeType; - private String granteeCanonicalId; - - private int permission; - private int grantOrder; - - private Date createTime; - private Date lastModifiedTime; - - public SAcl() { - } - - public Long getId() { - return id; - } - - private void setId(Long id) { - this.id = id; - } - - public String getTarget() { - return target; - } - - public void setTarget(String target) { - this.target = target; - } - - public long getTargetId() { - return targetId; - } - - public void setTargetId(long targetId) { - this.targetId = targetId; - } - - public int getGranteeType() { - return granteeType; - } - - public void setGranteeType(int granteeType) { - this.granteeType = granteeType; - } - - public String getGranteeCanonicalId() { - return granteeCanonicalId; - } - - public void setGranteeCanonicalId(String granteeCanonicalId) { - this.granteeCanonicalId = granteeCanonicalId; - } - - public int getPermission() { - return permission; - } - - public void setPermission(int permission) { - this.permission = permission; - } - - public int getGrantOrder() { - return grantOrder; - } - - public void setGrantOrder(int grantOrder) { - this.grantOrder = grantOrder; - } - - public Date getCreateTime() { - return createTime; - } - - public void setCreateTime(Date createTime) { - this.createTime = createTime; - } - - public Date getLastModifiedTime() { - return lastModifiedTime; - } - - public void setLastModifiedTime(Date lastModifiedTime) { - this.lastModifiedTime = lastModifiedTime; - } - /** Return an OrderedPair * < permission, grantee > * comprising @@ -153,9 +63,9 @@ public class SAcl implements Serializable { * @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs * @param target - Either "SBucket" or otherwise assumed to be for a single object item */ - public static OrderedPair getCannedAccessControls ( String aclRequestString, String target ) - throws UnsupportedException - { + //public static OrderedPair getCannedAccessControls ( String aclRequestString, String target ); + +/* { if ( aclRequestString.equalsIgnoreCase( "public-read" )) // All users granted READ access. 
return new OrderedPair (PERMISSION_READ,GRANTEE_ALLUSERS); @@ -184,7 +94,7 @@ public class SAcl implements Serializable { } else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" ); } - +*/ /** Return a Triple * < permission1, permission2, symbol > * comprising @@ -200,8 +110,8 @@ public class SAcl implements Serializable { * @param target - Either "SBucket" or otherwise assumed to be for a single object item * @param ownerID - An ID for the owner, if used in place of symbols "A" or "*" */ - public static Triple getCannedAccessControls ( String aclRequestString, String target, String ownerID ) - throws UnsupportedException + //public static Triple getCannedAccessControls ( String aclRequestString, String target, String ownerID ); +/* throws UnsupportedException { if ( aclRequestString.equalsIgnoreCase( "public-read" )) // Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ access. @@ -235,5 +145,5 @@ public class SAcl implements Serializable { } else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" ); } - +*/ } diff --git a/awsapi/src/com/cloud/bridge/model/SAclVO.java b/awsapi/src/com/cloud/bridge/model/SAclVO.java new file mode 100644 index 00000000000..96a252f42da --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/SAclVO.java @@ -0,0 +1,254 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package com.cloud.bridge.model; + +import java.util.Date; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; + +import com.cloud.bridge.service.exception.UnsupportedException; +import com.cloud.bridge.util.OrderedPair; +import com.cloud.bridge.util.Triple; + +/** + * A model of stored ACLs to remember the ACL permissions per canonicalUserID per grantee + * Hold the AWS S3 grantee and permission constants. + * + * This class implements two forms of getCannedAccessControls mappings, as static methods, + * + * (a) an OrderedPair which provides a maplet across + * < permission, grantee > + * when given an aclRequestString and a target (i.e. bucket or object), + * + * (b) a Triplet + * < permission1, permission2, symbol > + * when given an aclRequestString, a target (i.e. bucket or object) and the ID of the owner. 
+ */ +@Entity +@Table(name="acl") +public class SAclVO implements SAcl { + private static final long serialVersionUID = 7900837117165018850L; + + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + @Column(name="Target") + private String target; + + @Column(name="TargetID") + private long targetId; + + @Column(name="GranteeType") + private int granteeType; + + @Column(name="GranteeCanonicalID") + private String granteeCanonicalId; + + @Column(name="Permission") + private int permission; + + @Column(name="GrantOrder") + private int grantOrder; + + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) + private Date createTime; + + @Column(name="LastModifiedTime") + @Temporal(value=TemporalType.TIMESTAMP) + private Date lastModifiedTime; + + public SAclVO() { + } + + public Long getId() { + return id; + } + + private void setId(Long id) { + this.id = id; + } + + public String getTarget() { + return target; + } + + public void setTarget(String target) { + this.target = target; + } + + public long getTargetId() { + return targetId; + } + + public void setTargetId(long targetId) { + this.targetId = targetId; + } + + public int getGranteeType() { + return granteeType; + } + + public void setGranteeType(int granteeType) { + this.granteeType = granteeType; + } + + public String getGranteeCanonicalId() { + return granteeCanonicalId; + } + + public void setGranteeCanonicalId(String granteeCanonicalId) { + this.granteeCanonicalId = granteeCanonicalId; + } + + public int getPermission() { + return permission; + } + + public void setPermission(int permission) { + this.permission = permission; + } + + public int getGrantOrder() { + return grantOrder; + } + + public void setGrantOrder(int grantOrder) { + this.grantOrder = grantOrder; + } + + public Date getCreateTime() { + return createTime; + } + + public void setCreateTime(Date createTime) { + this.createTime = createTime; + } + + public Date getLastModifiedTime() { + return lastModifiedTime; + } + + public void setLastModifiedTime(Date lastModifiedTime) { + this.lastModifiedTime = lastModifiedTime; + } + + /** Return an OrderedPair + * < permission, grantee > + * comprising + * a permission - which is one of SAcl.PERMISSION_PASS, SAcl.PERMISSION_NONE, SAcl.PERMISSION_READ, + * SAcl.PERMISSION_WRITE, SAcl.PERMISSION_READ_ACL, SAcl.PERMISSION_WRITE_ACL, SAcl.PERMISSION_FULL + * a grantee - which is one of GRANTEE_ALLUSERS, GRANTEE_AUTHENTICATED, GRANTEE_USER + * + * Access controls that are specified via the "x-amz-acl:" headers in REST requests for buckets. + * The ACL request string is treated as a request for a known cannedAccessPolicy + * @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs + * @param target - Either "SBucket" or otherwise assumed to be for a single object item + */ + public static OrderedPair getCannedAccessControls ( String aclRequestString, String target ) + throws UnsupportedException + { + if ( aclRequestString.equalsIgnoreCase( "public-read" )) + // All users granted READ access. 
+ return new OrderedPair (PERMISSION_READ,GRANTEE_ALLUSERS); + else if (aclRequestString.equalsIgnoreCase( "public-read-write" )) + // All users granted READ and WRITE access + return new OrderedPair ((PERMISSION_READ | PERMISSION_WRITE),GRANTEE_ALLUSERS); + else if (aclRequestString.equalsIgnoreCase( "authenticated-read" )) + // Authenticated users have READ access + return new OrderedPair (PERMISSION_READ,GRANTEE_AUTHENTICATED); + else if (aclRequestString.equalsIgnoreCase( "private" )) + // Only Owner gets FULL_CONTROL + return new OrderedPair (PERMISSION_FULL,GRANTEE_USER); + else if (aclRequestString.equalsIgnoreCase( "bucket-owner-read" )) + { + // Object Owner gets FULL_CONTROL, Bucket Owner gets READ + if ( target.equalsIgnoreCase( "SBucket" )) + return new OrderedPair (PERMISSION_READ, GRANTEE_USER); + else + return new OrderedPair (PERMISSION_FULL, GRANTEE_USER); + } + else if (aclRequestString.equalsIgnoreCase( "bucket-owner-full-control" )) + { + // Object Owner gets FULL_CONTROL, Bucket Owner gets FULL_CONTROL + // This is equivalent to private when used with PUT Bucket + return new OrderedPair (PERMISSION_FULL,GRANTEE_USER); + } + else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" ); + } + + /** Return a Triple + * < permission1, permission2, symbol > + * comprising + * two permissions - which is one of SAcl.PERMISSION_PASS, SAcl.PERMISSION_NONE, SAcl.PERMISSION_READ, + * SAcl.PERMISSION_WRITE, SAcl.PERMISSION_READ_ACL, SAcl.PERMISSION_WRITE_ACL, SAcl.PERMISSION_FULL + * permission1 applies to objects, permission2 applies to buckets. + * a symbol to indicate whether the principal is anonymous (i.e. string "A") or authenticated user (i.e. + * string "*") - otherwise null indicates a single ACL for all users. + * + * Access controls that are specified via the "x-amz-acl:" headers in REST requests for buckets. + * The ACL request string is treated as a request for a known cannedAccessPolicy + * @param aclRequestString - The requested ACL from the set of AWS S3 canned ACLs + * @param target - Either "SBucket" or otherwise assumed to be for a single object item + * @param ownerID - An ID for the owner, if used in place of symbols "A" or "*" + */ + public static Triple getCannedAccessControls ( String aclRequestString, String target, String ownerID ) + throws UnsupportedException + { + if ( aclRequestString.equalsIgnoreCase( "public-read" )) + // Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ access. 
+ return new Triple (PERMISSION_FULL, PERMISSION_READ,"A"); + else if (aclRequestString.equalsIgnoreCase( "public-read-write" )) + // Owner gets FULL_CONTROL and the anonymous principal (the 'A' symbol here) is granted READ and WRITE access + return new Triple (PERMISSION_FULL, (PERMISSION_READ | PERMISSION_WRITE),"A"); + else if (aclRequestString.equalsIgnoreCase( "authenticated-read" )) + // Owner gets FULL_CONTROL and ANY principal authenticated as a registered S3 user (the '*' symbol here) is granted READ access + return new Triple (PERMISSION_FULL, PERMISSION_READ,"*"); + else if (aclRequestString.equalsIgnoreCase( "private" )) + // This is termed the "private" or default ACL, "Owner gets FULL_CONTROL" + return new Triple (PERMISSION_FULL, PERMISSION_FULL,null); + else if (aclRequestString.equalsIgnoreCase( "bucket-owner-read" )) + { + // Object Owner gets FULL_CONTROL, Bucket Owner gets READ + // This is equivalent to private when used with PUT Bucket + if ( target.equalsIgnoreCase( "SBucket" )) + return new Triple (PERMISSION_FULL,PERMISSION_FULL ,null); + else + return new Triple (PERMISSION_FULL,PERMISSION_READ,ownerID); + } + else if (aclRequestString.equalsIgnoreCase( "bucket-owner-full-control" )) + { + // Object Owner gets FULL_CONTROL, Bucket Owner gets FULL_CONTROL + // This is equivalent to private when used with PUT Bucket + if ( target.equalsIgnoreCase( "SBucket" )) + return new Triple (PERMISSION_FULL, PERMISSION_FULL, null); + else + return new Triple (PERMISSION_FULL,PERMISSION_FULL, ownerID); + } + else throw new UnsupportedException( "Unknown Canned Access Policy: " + aclRequestString + " is not supported" ); + } + +} diff --git a/awsapi/src/com/cloud/bridge/model/SBucket.hbm.xml b/awsapi/src/com/cloud/bridge/model/SBucket.hbm.xml deleted file mode 100644 index 83f495d9ad9..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SBucket.hbm.xml +++ /dev/null @@ -1,56 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SBucket.java b/awsapi/src/com/cloud/bridge/model/SBucket.java index 720907fa529..7703ae72962 100644 --- a/awsapi/src/com/cloud/bridge/model/SBucket.java +++ b/awsapi/src/com/cloud/bridge/model/SBucket.java @@ -31,14 +31,13 @@ import java.util.Set; * VersioningStatus * For ORM see "com/cloud/bridge/model/SHost.hbm.xml" */ -public class SBucket implements Serializable { - private static final long serialVersionUID = 7430267766019671273L; - - public static final int VERSIONING_NULL = 0; +public interface SBucket { + + public static final int VERSIONING_NULL = 0; public static final int VERSIONING_ENABLED = 1; public static final int VERSIONING_SUSPENDED = 2; - private Long id; +/* private Long id; private String name; private String ownerCanonicalId; @@ -124,5 +123,5 @@ public class SBucket implements Serializable { @Override public int hashCode() { return getName().hashCode(); - } + }*/ } diff --git a/awsapi/src/com/cloud/bridge/model/SBucketVO.java b/awsapi/src/com/cloud/bridge/model/SBucketVO.java new file mode 100644 index 00000000000..ca47bf7b3ca --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/SBucketVO.java @@ -0,0 +1,169 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. 
The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package com.cloud.bridge.model; + +import java.io.Serializable; +import java.util.Date; +import java.util.HashSet; +import java.util.Set; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Transient; + +/** + * Holds the relation + * Id, + * Name, + * OwnerCanonicalId, + * SHost, + * CreateTime, + * VersioningStatus + * For ORM see "com/cloud/bridge/model/SHost.hbm.xml" + */ + +@Entity +@Table(name="sbucket") +public class SBucketVO implements SBucket { + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + + @Column(name="Name") + private String name; + + @Column(name="OwnerCanonicalID") + private String ownerCanonicalId; + + @Column(name="SHostID") + private long shostID; + + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) + private Date createTime; + + @Column(name="VersioningStatus") + private int versioningStatus; + + @Transient + private SHostVO shost; + + @Transient + private Set objectsInBucket = new HashSet(); + + public SBucketVO() { + versioningStatus = VERSIONING_NULL; + this.createTime = new Date(); + } + + + public SBucketVO(String bucketName, Date currentGMTTime, + String canonicalUserId, SHostVO first) { + this.versioningStatus = VERSIONING_NULL; + this.name = bucketName; + this.createTime = new Date(); + this.ownerCanonicalId = canonicalUserId; + this.shost = first; + this.shostID = shost.getId(); + } + + + public Long getId() { + return id; + } + + private void setId(Long id) { + this.id = id; + } + + public String getName() { + return name; + } + + public void setName(String name) { + this.name = name; + } + + public String getOwnerCanonicalId() { + return ownerCanonicalId; + } + + public void setOwnerCanonicalId(String ownerCanonicalId) { + this.ownerCanonicalId = ownerCanonicalId; + } + + public long getShostID() { + return shostID; + } + + public void setShostID(long shostID) { + this.shostID = shostID; + } + + public SHostVO getShost() { + return shost; + } + + public void setShost(SHostVO shost) { + this.shost = shost; + } + + public Date getCreateTime() { + return createTime; + } + + + public int getVersioningStatus() { + return versioningStatus; + } + + public void setVersioningStatus( int versioningStatus ) { + this.versioningStatus = versioningStatus; + } + + public Set getObjectsInBucket() { + return objectsInBucket; + } + + public void setObjectsInBucket(Set objectsInBucket) { + this.objectsInBucket = objectsInBucket; + } + + @Override + public boolean equals(Object other) { + if(this == other) + return true; + + if(!(other instanceof SBucketVO)) + return false; + + return getName().equals(((SBucketVO)other).getName()); + } + + @Override + public int 
hashCode() { + return getName().hashCode(); + } +} diff --git a/awsapi/src/com/cloud/bridge/model/SHost.hbm.xml b/awsapi/src/com/cloud/bridge/model/SHost.hbm.xml deleted file mode 100644 index 5308a0837fd..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SHost.hbm.xml +++ /dev/null @@ -1,60 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SHost.java b/awsapi/src/com/cloud/bridge/model/SHost.java index 758cbbf0930..4ed5b7e45c9 100644 --- a/awsapi/src/com/cloud/bridge/model/SHost.java +++ b/awsapi/src/com/cloud/bridge/model/SHost.java @@ -20,17 +20,19 @@ import java.io.Serializable; import java.util.HashSet; import java.util.Set; -public class SHost implements Serializable { - private static final long serialVersionUID = 213346565810468018L; +public interface SHost { public static final int STORAGE_HOST_TYPE_LOCAL = 0; public static final int STORAGE_HOST_TYPE_NFS = 1; - - private Long id; + public static enum StorageHostType { + STORAGE_HOST_TYPE_LOCAL, //0 + STORAGE_HOST_TYPE_NFS //1 + } +/* private Long id; private String host; private int hostType; - private MHost mhost; + private MHostVO mhost; private String exportRoot; private String userOnHost; private String userPassword; @@ -89,11 +91,11 @@ public class SHost implements Serializable { this.userPassword = userPassword; } - public MHost getMhost() { + public MHostVO getMhost() { return mhost; } - public void setMhost(MHost mhost) { + public void setMhost(MHostVO mhost) { this.mhost = mhost; } @@ -111,5 +113,5 @@ public class SHost implements Serializable { public void setMounts(Set mounts) { this.mounts = mounts; - } + }*/ } diff --git a/awsapi/src/com/cloud/bridge/model/SHostVO.java b/awsapi/src/com/cloud/bridge/model/SHostVO.java new file mode 100644 index 00000000000..f3d2a5801b6 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/model/SHostVO.java @@ -0,0 +1,152 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.model; + +import java.util.HashSet; +import java.util.Set; + +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Transient; + +@Entity +@Table(name="shost") +public class SHostVO implements SHost{ + private static final long serialVersionUID = 213346565810468018L; + + public static final int STORAGE_HOST_TYPE_LOCAL = 0; + public static final int STORAGE_HOST_TYPE_NFS = 1; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") + private Long id; + + @Column(name="Host") + private String host; + + @Column(name="HostType") + private int hostType; + + @Column(name="MHostID") + private long mhostid; + + @Column(name="ExportRoot") + private String exportRoot; + + @Column(name="UserOnHost") + private String userOnHost; + + @Column(name="UserPassword") + private String userPassword; + + @Transient + private MHostVO mhost; + + @Transient + private Set buckets = new HashSet(); + + @Transient + private Set mounts = new HashSet(); + + public SHostVO() { + } + + public Long getId() { + return id; + } + + private void setId(Long id) { + this.id = id; + } + + public String getHost() { + return host; + } + + public void setHost(String host) { + this.host = host; + } + + public int getHostType() { + return hostType; + } + + public void setHostType(int hostType) { + this.hostType = hostType; + } + + public long getMhostid() { + return mhostid; + } + + public void setMhostid(long mhostid) { + this.mhostid = mhostid; + } + + public String getExportRoot() { + return exportRoot; + } + + public void setExportRoot(String exportRoot) { + this.exportRoot = exportRoot; + } + + public String getUserOnHost() { + return userOnHost; + } + + public void setUserOnHost(String userOnHost) { + this.userOnHost = userOnHost; + } + + public String getUserPassword() { + return userPassword; + } + + public void setUserPassword(String userPassword) { + this.userPassword = userPassword; + } + + public MHostVO getMhost() { + return mhost; + } + + public void setMhost(MHostVO mhost) { + this.mhost = mhost; + } + + public Set getBuckets() { + return buckets; + } + + public void setBuckets(Set buckets) { + this.buckets = buckets; + } + + public Set getMounts() { + return mounts; + } + + public void setMounts(Set mounts) { + this.mounts = mounts; + } +} diff --git a/awsapi/src/com/cloud/bridge/model/SMeta.hbm.xml b/awsapi/src/com/cloud/bridge/model/SMeta.hbm.xml deleted file mode 100644 index 0f3b12be7f7..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SMeta.hbm.xml +++ /dev/null @@ -1,41 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SMeta.java b/awsapi/src/com/cloud/bridge/model/SMetaVO.java similarity index 74% rename from awsapi/src/com/cloud/bridge/model/SMeta.java rename to awsapi/src/com/cloud/bridge/model/SMetaVO.java index 2c985710ae4..537310fc8f7 100644 --- a/awsapi/src/com/cloud/bridge/model/SMeta.java +++ b/awsapi/src/com/cloud/bridge/model/SMetaVO.java @@ -18,18 +18,36 @@ package com.cloud.bridge.model; import java.io.Serializable; -public class SMeta implements Serializable { +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name="meta") +public 
class SMetaVO implements Serializable { private static final long serialVersionUID = 7459503272337054283L; + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; + + @Column(name="Target") private String target; + + @Column(name="TargetID") private long targetId; + @Column(name="Name") private String name; - private String value; - public SMeta() { - } + @Column(name="Value") + private String value; + + public SMetaVO() {} public Long getId() { return id; @@ -76,11 +94,11 @@ public class SMeta implements Serializable { if(this == other) return true; - if(!(other instanceof SMeta)) + if(!(other instanceof SMetaVO)) return false; - return getTarget().equals(((SMeta)other).getTarget()) && getTargetId() == ((SMeta)other).getTargetId() - && getName().equals(((SMeta)other).getName()); + return getTarget().equals(((SMetaVO)other).getTarget()) && getTargetId() == ((SMetaVO)other).getTargetId() + && getName().equals(((SMetaVO)other).getName()); } @Override diff --git a/awsapi/src/com/cloud/bridge/model/SObject.hbm.xml b/awsapi/src/com/cloud/bridge/model/SObject.hbm.xml deleted file mode 100644 index 3929bcddad7..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SObject.hbm.xml +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SObjectItem.hbm.xml b/awsapi/src/com/cloud/bridge/model/SObjectItem.hbm.xml deleted file mode 100644 index 02297a2afff..00000000000 --- a/awsapi/src/com/cloud/bridge/model/SObjectItem.hbm.xml +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/SObjectItem.java b/awsapi/src/com/cloud/bridge/model/SObjectItemVO.java similarity index 67% rename from awsapi/src/com/cloud/bridge/model/SObjectItem.java rename to awsapi/src/com/cloud/bridge/model/SObjectItemVO.java index bbff2679109..8d86103d90b 100644 --- a/awsapi/src/com/cloud/bridge/model/SObjectItem.java +++ b/awsapi/src/com/cloud/bridge/model/SObjectItemVO.java @@ -19,22 +19,56 @@ package com.cloud.bridge.model; import java.io.Serializable; import java.util.Date; -public class SObjectItem implements Serializable { - private static final long serialVersionUID = -7351173256185687851L; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Transient; +@Entity +@Table(name="sobject_item") +public class SObjectItemVO { + private static final long serialVersionUID = -7351173256185687851L; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; - private SObject theObject; + @Column(name="SObjectID") + private long sObjectID; + + @Column(name="Version") private String version; + + @Column(name="MD5") private String md5; + + @Column(name="StoredPath") private String storedPath; + + @Column(name="StoredSize") private long storedSize; + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date createTime; + + @Column(name="LastModifiedTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date lastModifiedTime; + + @Column(name="LastAccessTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date lastAccessTime; - public SObjectItem() { + @Transient + 
private SObjectVO theObject; + public SObjectItemVO() { } public Long getId() { @@ -45,14 +79,22 @@ public class SObjectItem implements Serializable { this.id = id; } - public SObject getTheObject() { + public SObjectVO getTheObject() { return theObject; } - public void setTheObject(SObject theObject) { + public void setTheObject(SObjectVO theObject) { this.theObject = theObject; } + public long getsObjectID() { + return sObjectID; + } + + public void setsObjectID(long sObjectID) { + this.sObjectID = sObjectID; + } + public String getVersion() { return version; } @@ -114,22 +156,22 @@ public class SObjectItem implements Serializable { if(this == other) return true; - if(!(other instanceof SObjectItem)) + if(!(other instanceof SObjectItemVO)) return false; if(version != null) { - if(!version.equals(((SObjectItem)other).getVersion())) + if(!version.equals(((SObjectItemVO)other).getVersion())) return false; } else { - if(((SObjectItem)other).getVersion() != null) + if(((SObjectItemVO)other).getVersion() != null) return false; } if(theObject.getId() != null) { - if(!theObject.getId().equals(((SObjectItem)other).getTheObject())) + if(!theObject.getId().equals(((SObjectItemVO)other).getTheObject())) return false; } else { - if(((SObjectItem)other).getTheObject() != null) + if(((SObjectItemVO)other).getTheObject() != null) return false; } return true; diff --git a/awsapi/src/com/cloud/bridge/model/SObject.java b/awsapi/src/com/cloud/bridge/model/SObjectVO.java similarity index 68% rename from awsapi/src/com/cloud/bridge/model/SObject.java rename to awsapi/src/com/cloud/bridge/model/SObjectVO.java index 1692b685295..02f576a8b73 100644 --- a/awsapi/src/com/cloud/bridge/model/SObject.java +++ b/awsapi/src/com/cloud/bridge/model/SObjectVO.java @@ -16,30 +16,58 @@ // under the License. 
package com.cloud.bridge.model; -import java.io.Serializable; import java.util.Date; import java.util.HashSet; import java.util.Iterator; import java.util.Set; -public class SObject implements Serializable { - private static final long serialVersionUID = 8566744941395660486L; +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; +import javax.persistence.Temporal; +import javax.persistence.TemporalType; +import javax.persistence.Transient; + +@Entity +@Table(name="sobject") +public class SObjectVO { + //private static final long serialVersionUID = 8566744941395660486L; + + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; + @Column(name="SBucketID") + private long bucketID; + + @Column(name="NameKey") private String nameKey; + + @Column(name="OwnerCanonicalID") private String ownerCanonicalId; + @Column(name="NextSequence") private int nextSequence; - private String deletionMark; // This must also a unique ID to give to the REST client + @Column(name="DeletionMark") + private String deletionMark; // This must also a unique ID to give to the REST client + + @Column(name="CreateTime") + @Temporal(value=TemporalType.TIMESTAMP) private Date createTime; + @Transient private SBucket bucket; - private Set items = new HashSet(); - - public SObject() { + @Transient + private Set items = new HashSet(); + + public SObjectVO() { deletionMark = null; } @@ -51,6 +79,14 @@ public class SObject implements Serializable { this.id = id; } + public long getBucketID() { + return bucketID; + } + + public void setBucketID(long bucketID) { + this.bucketID = bucketID; + } + public String getNameKey() { return nameKey; } @@ -99,20 +135,20 @@ public class SObject implements Serializable { this.bucket = bucket; } - public Set getItems() { + public Set getItems() { return items; } - public void setItems(Set items) { + public void setItems(Set items) { this.items = items; } public void deleteItem( long id ) { - Iterator it = getItems().iterator(); + Iterator it = getItems().iterator(); while( it.hasNext()) { - SObjectItem oneItem = it.next(); + SObjectItemVO oneItem = it.next(); if (id == oneItem.getId()) { boolean bRemoved = items.remove( oneItem ); System.out.println( "deleteItem from sobject: " + bRemoved ); @@ -121,15 +157,15 @@ public class SObject implements Serializable { } } - public SObjectItem getLatestVersion( boolean versioningOff ) { - Iterator it = getItems().iterator(); + public SObjectItemVO getLatestVersion( boolean versioningOff ) { + Iterator it = getItems().iterator(); int maxVersion = 0; int curVersion = 0; - SObjectItem latestItem = null; + SObjectItemVO latestItem = null; while( it.hasNext()) { - SObjectItem item = it.next(); + SObjectItemVO item = it.next(); // If versioning is off then return the item with the null version string (if exists) // For example, the bucket could have allowed versioning and then it was suspended @@ -160,12 +196,12 @@ public class SObject implements Serializable { * @param wantVersion * @return */ - public SObjectItem getVersion( String wantVersion ) + public SObjectItemVO getVersion( String wantVersion ) { - Iterator it = getItems().iterator(); + Iterator it = getItems().iterator(); while( it.hasNext()) { - SObjectItem item = it.next(); + SObjectItemVO item = it.next(); String curVersion = item.getVersion(); if (null != curVersion && wantVersion.equalsIgnoreCase( 
curVersion )) return item; } @@ -177,17 +213,17 @@ public class SObject implements Serializable { if(this == other) return true; - if(!(other instanceof SObject)) + if(!(other instanceof SObjectVO)) return false; - if(!getNameKey().equals(((SObject)other).getNameKey())) + if(!getNameKey().equals(((SObjectVO)other).getNameKey())) return false; if(getBucket() != null) { - if(!getBucket().equals(((SObject)other).getBucket())) + if(!getBucket().equals(((SObjectVO)other).getBucket())) return false; } else { - if(((SObject)other).getBucket() != null) + if(((SObjectVO)other).getBucket() != null) return false; } diff --git a/awsapi/src/com/cloud/bridge/model/UserCredentials.hbm.xml b/awsapi/src/com/cloud/bridge/model/UserCredentials.hbm.xml deleted file mode 100644 index d2f39190556..00000000000 --- a/awsapi/src/com/cloud/bridge/model/UserCredentials.hbm.xml +++ /dev/null @@ -1,38 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/bridge/model/UserCredentials.java b/awsapi/src/com/cloud/bridge/model/UserCredentialsVO.java similarity index 72% rename from awsapi/src/com/cloud/bridge/model/UserCredentials.java rename to awsapi/src/com/cloud/bridge/model/UserCredentialsVO.java index b674f40c191..dae5fd91fbc 100644 --- a/awsapi/src/com/cloud/bridge/model/UserCredentials.java +++ b/awsapi/src/com/cloud/bridge/model/UserCredentialsVO.java @@ -18,17 +18,39 @@ package com.cloud.bridge.model; import java.io.Serializable; -public class UserCredentials implements Serializable { +import javax.persistence.Column; +import javax.persistence.Entity; +import javax.persistence.GeneratedValue; +import javax.persistence.GenerationType; +import javax.persistence.Id; +import javax.persistence.Table; + +@Entity +@Table(name="usercredentials") +public class UserCredentialsVO{ private static final long serialVersionUID = 7459503272337054299L; + @Id + @GeneratedValue(strategy=GenerationType.IDENTITY) + @Column(name="ID") private Long id; + + @Column(name="AccessKey") private String accessKey; + + @Column(name="SecretKey") private String secretKey; + + @Column(name="CertUniqueId") private String certUniqueId; - public UserCredentials() { - } + public UserCredentialsVO() { } + public UserCredentialsVO(String accessKey, String secretKey) { + this.accessKey = accessKey; + this.secretKey = secretKey; + } + public Long getId() { return id; } @@ -65,14 +87,14 @@ public class UserCredentials implements Serializable { public boolean equals(Object other) { if (this == other) return true; - if (!(other instanceof UserCredentials)) return false; + if (!(other instanceof UserCredentialsVO)) return false; // The cert id can be null. The cert is unused in the REST API. 
- if ( getAccessKey().equals(((UserCredentials)other).getAccessKey()) && - getSecretKey().equals(((UserCredentials)other).getSecretKey())) + if ( getAccessKey().equals(((UserCredentialsVO)other).getAccessKey()) && + getSecretKey().equals(((UserCredentialsVO)other).getSecretKey())) { String thisCertId = getCertUniqueId(); - String otherCertId = ((UserCredentials)other).getCertUniqueId(); + String otherCertId = ((UserCredentialsVO)other).getCertUniqueId(); if (null == thisCertId && null == otherCertId) return true; diff --git a/awsapi/src/com/cloud/bridge/persist/EntityDao.java b/awsapi/src/com/cloud/bridge/persist/EntityDao.java deleted file mode 100644 index cc4a8c7d818..00000000000 --- a/awsapi/src/com/cloud/bridge/persist/EntityDao.java +++ /dev/null @@ -1,118 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. -package com.cloud.bridge.persist; - -import java.io.Serializable; -import java.util.List; - -import org.hibernate.Query; -import org.hibernate.Session; - -import com.cloud.bridge.util.QueryHelper; - -/** - * Provide methods for getting, saving, deleting or updating state per session or, in a given session, returnin a List in - * response to queryEntities for a particular instantation of the EntityDao generic class, as defined here. - * Any instantation of EntityDao passes in the class for which it is instantiating. For example a new instance of SBucketDao - * passes in com.cloud.bridge.model.SBucket as its clazz. - * Instantiators, providing an Entity definition, are the classes - * MHostDao, - * MHostMountDao, - * SAclDao, - * SBucketDao, - * SHostDao, - * SMetaDao, - * SObjectDao, - * SObjectItemDao, - * CloudStackSvcOfferingDao - */ - -public class EntityDao { - private Class clazz; - - private boolean isCloudStackSession = false; - - // Constructor to implement CloudStackSvcOffering: see class CloudStackSvcOfferingDao - public EntityDao(Class clazz){ - this(clazz, false); - } - - public EntityDao(Class clazz, boolean isCloudStackSession) { - this.clazz = clazz; - this.isCloudStackSession = isCloudStackSession; - // Note : beginTransaction can be called multiple times - // "If a new underlying transaction is required, begin the transaction. Otherwise continue the new work in the - // context of the existing underlying transaction." 
from the Hibernate spec - PersistContext.beginTransaction(isCloudStackSession); - } - - @SuppressWarnings("unchecked") - public T get(Serializable id) { - Session session = PersistContext.getSession(isCloudStackSession); - return (T)session.get(clazz, id); - } - - public T save(T entity) { - Session session = PersistContext.getSession(isCloudStackSession); - session.saveOrUpdate(entity); - return entity; - } - - public T update(T entity) { - Session session = PersistContext.getSession(isCloudStackSession); - session.saveOrUpdate(entity); - return entity; - } - - public void delete(T entity) { - Session session = PersistContext.getSession(isCloudStackSession); - session.delete(entity); - } - - public T queryEntity(String hql, Object[] params) { - Session session = PersistContext.getSession(isCloudStackSession); - Query query = session.createQuery(hql); - query.setMaxResults(1); - QueryHelper.bindParameters(query, params); - return (T)query.uniqueResult(); - } - - public List queryEntities(String hql, Object[] params) { - Session session = PersistContext.getSession(isCloudStackSession); - Query query = session.createQuery(hql); - QueryHelper.bindParameters(query, params); - - return (List)query.list(); - } - - public List queryEntities(String hql, int offset, int limit, Object[] params) { - Session session = PersistContext.getSession(isCloudStackSession); - Query query = session.createQuery(hql); - QueryHelper.bindParameters(query, params); - query.setFirstResult(offset); - query.setMaxResults(limit); - return (List)query.list(); - } - - public int executeUpdate(String hql, Object[] params) { - Session session = PersistContext.getSession(isCloudStackSession); - Query query = session.createQuery(hql); - QueryHelper.bindParameters(query, params); - - return query.executeUpdate(); - } -} diff --git a/awsapi/src/com/cloud/bridge/persist/GMTDateTimeUserType.java b/awsapi/src/com/cloud/bridge/persist/GMTDateTimeUserType.java deleted file mode 100644 index cf16233b680..00000000000 --- a/awsapi/src/com/cloud/bridge/persist/GMTDateTimeUserType.java +++ /dev/null @@ -1,102 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-package com.cloud.bridge.persist; - -import java.io.Serializable; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.sql.Types; -import java.util.Date; - -import org.hibernate.HibernateException; -import org.hibernate.usertype.UserType; - -import com.cloud.bridge.util.DateHelper; - -/** - * GMTDateTimeUserType implements a Hibernate user type, it deals with GMT date/time conversion - * between Java Date/Calendar and MySQL DATE types - */ -public class GMTDateTimeUserType implements UserType { - - private static final int[] SQL_TYPES = { Types.VARBINARY }; - - public Class returnedClass() { return Date.class; } - - public boolean equals(Object x, Object y) { - if (x == y) - return true; - - if (x == null || y == null) - return false; - - return x.equals(y); - } - - public int hashCode(Object x) { - if(x != null) - return x.hashCode(); - - return 0; - } - - public Object deepCopy(Object value) { - if(value != null) - return ((Date)value).clone(); - return null; - } - - public boolean isMutable() { - return true; - } - - public Object nullSafeGet(ResultSet resultSet, String[] names, Object owner) - throws HibernateException, SQLException { - - String dateString = resultSet.getString(names[0]); - if(dateString != null) - return DateHelper.parseDateString(DateHelper.GMT_TIMEZONE, dateString); - return null; - } - - public void nullSafeSet(PreparedStatement statement, Object value, int index) - throws HibernateException, SQLException { - if (value == null) { - statement.setNull(index, Types.TIMESTAMP); - } else { - Date dt = (Date)value; - statement.setString(index, DateHelper.getDateDisplayString(DateHelper.GMT_TIMEZONE, dt)); - } - } - - public Object assemble(Serializable cached, Object owner) throws HibernateException { - return DateHelper.parseDateString(DateHelper.GMT_TIMEZONE, (String)cached); - } - - public Serializable disassemble(Object value) throws HibernateException { - return DateHelper.getDateDisplayString(DateHelper.GMT_TIMEZONE, (Date)value); - } - - public Object replace(Object original, Object target, Object owner) throws HibernateException { - return ((Date)original).clone(); - } - - public int[] sqlTypes() { - return SQL_TYPES; - } -} diff --git a/awsapi/src/com/cloud/bridge/persist/PersistContext.java b/awsapi/src/com/cloud/bridge/persist/PersistContext.java deleted file mode 100644 index 8e5e0de8884..00000000000 --- a/awsapi/src/com/cloud/bridge/persist/PersistContext.java +++ /dev/null @@ -1,359 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-package com.cloud.bridge.persist; - -import java.sql.Connection; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.HashMap; -import java.util.Map; - -import org.apache.log4j.Logger; -import org.hibernate.HibernateException; -import org.hibernate.Session; -import org.hibernate.Transaction; - -import com.cloud.bridge.util.CloudSessionFactory; -import com.cloud.bridge.util.CloudStackSessionFactory; -import com.cloud.bridge.util.OrderedPair; - -/** - * - * We use Per-thread based hibernate session and transaction pattern. Transaction will be - * started implicitly by EntityDao instances and be committed implicitly in the end of - * request-process cycle. All requests are guarded by a dynamic proxy. - * - * We will try to keep transaction management as implicit as we can, so that - * most of service layer implementation contains business-logic only, all business logic are - * built on top of domain object model, and all persistent layer handling lie within persist layer - * in Dao classes. - * - * PersistContext class also provides per-thread based registry service and global named-lock service - */ -public class PersistContext { - protected final static Logger logger = Logger.getLogger(PersistContext.class); - - private static final CloudSessionFactory sessionFactory; - - private static final ThreadLocal threadSession = new ThreadLocal(); - private static final ThreadLocal threadTransaction = new ThreadLocal(); - private static final ThreadLocal> threadStore = new ThreadLocal>(); - - private static final CloudStackSessionFactory cloudStackSessionFactory; - private static final ThreadLocal threadCloudStackSession = new ThreadLocal(); - private static final ThreadLocal threadCloudStackTransaction = new ThreadLocal(); - - static { - try { - sessionFactory = CloudSessionFactory.getInstance(); - cloudStackSessionFactory = CloudStackSessionFactory.getInstance(); - } catch(HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - throw new PersistException(e); - } - } - - public static Session getSession(boolean cloudStackSession) { - Session s = null; - try { - if(cloudStackSession){ - s = threadCloudStackSession.get(); - if(s == null) { - s = cloudStackSessionFactory.openSession(); - threadCloudStackSession.set(s); - } - }else{ - s = threadSession.get(); - if(s == null) { - s = sessionFactory.openSession(); - threadSession.set(s); - } - } - } catch(HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - throw new PersistException(e); - } - return s; - } - - public static Session getSession() { - return getSession(false); - } - - public static void closeSession() { - closeSession(false); - } - - public static void closeSession(boolean cloudStackSession) { - try { - if(cloudStackSession){ - Session s = (Session) threadCloudStackSession.get(); - threadCloudStackSession.set(null); - if (s != null && s.isOpen()) - s.close(); - }else{ - Session s = (Session) threadSession.get(); - threadSession.set(null); - - if (s != null && s.isOpen()) - s.close(); - } - }catch(HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - throw new PersistException(e); - } - } - - public static void beginTransaction(boolean cloudStackTxn) { - Transaction tx = null; - try { - if(cloudStackTxn){ - tx = threadCloudStackTransaction.get(); - }else{ - tx = threadTransaction.get(); - } - - if (tx == null) { - tx = getSession(cloudStackTxn).beginTransaction(); - if(cloudStackTxn){ - 
threadCloudStackTransaction.set(tx); - }else{ - threadTransaction.set(tx); - } - } - } catch(HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - throw new PersistException(e); - } - } - - public static void beginTransaction() { - beginTransaction(false); - } - - public static void commitTransaction(boolean cloudStackTxn) { - Transaction tx = null; - - if(cloudStackTxn){ - tx = threadCloudStackTransaction.get(); - }else{ - tx = threadTransaction.get(); - } - - try { - if ( tx != null && !tx.wasCommitted() && !tx.wasRolledBack() ){ - tx.commit(); - } - if(cloudStackTxn){ - threadCloudStackTransaction.set(null); - }else{ - threadTransaction.set(null); - } - } catch (HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - - rollbackTransaction(cloudStackTxn); - throw new PersistException(e); - } - } - - public static void commitTransaction() { - commitTransaction(false); - } - - public static void rollbackTransaction(boolean cloudStackTxn) { - Transaction tx = null; - - if(cloudStackTxn){ - tx = (Transaction)threadCloudStackTransaction.get(); - threadCloudStackTransaction.set(null); - }else{ - tx = (Transaction)threadTransaction.get(); - threadTransaction.set(null); - } - try { - if ( tx != null && !tx.wasCommitted() && !tx.wasRolledBack() ) { - tx.rollback(); - } - } catch (HibernateException e) { - logger.error("Exception " + e.getMessage(), e); - throw new PersistException(e); - } finally { - closeSession(cloudStackTxn); - } - } - - public static void rollbackTransaction() { - rollbackTransaction(false); - } - - public static void flush() { - commitTransaction(); - beginTransaction(); - } - - /** - * acquireNamedLock/releaseNamedLock must be called in pairs and within the same thread - * they can not be called recursively neither - * - * @param name - * @param timeoutSeconds - * @return - */ - public static boolean acquireNamedLock(String name, int timeoutSeconds) { - Connection jdbcConnection = getJDBCConnection(name, true); - if(jdbcConnection == null) { - logger.warn("Unable to acquire named lock connection for named lock: " + name); - return false; - } - - PreparedStatement pstmt = null; - try { - pstmt = jdbcConnection.prepareStatement("SELECT COALESCE(GET_LOCK(?, ?),0)"); - - pstmt.setString(1, name); - pstmt.setInt(2, timeoutSeconds); - - ResultSet rs = pstmt.executeQuery(); - if (rs != null && rs.first()) { - if(rs.getInt(1) > 0) { - return true; - } else { - logger.error("GET_LOCK() timed out on lock : " + name); - } - } - } catch (SQLException e) { - logger.error("GET_LOCK() throws exception ", e); - } catch (Throwable e) { - logger.error("GET_LOCK() throws exception ", e); - } finally { - if (pstmt != null) { - try { - pstmt.close(); - } catch (SQLException e) { - logger.error("Unexpected exception " + e.getMessage(), e); - } - } - } - - releaseJDBCConnection(name); - return false; - } - - public static boolean releaseNamedLock(String name) { - Connection jdbcConnection = getJDBCConnection(name, false); - if(jdbcConnection == null) { - logger.error("Unable to acquire DB connection for global lock system"); - return false; - } - - PreparedStatement pstmt = null; - try { - pstmt = jdbcConnection.prepareStatement("SELECT COALESCE(RELEASE_LOCK(?), 0)"); - pstmt.setString(1, name); - ResultSet rs = pstmt.executeQuery(); - if(rs != null && rs.first()) - return rs.getInt(1) > 0; - logger.error("RELEASE_LOCK() returns unexpected result : " + rs.getInt(1)); - } catch (SQLException e) { - logger.error("RELEASE_LOCK() throws exception ", e); - } 
catch (Throwable e) { - logger.error("RELEASE_LOCK() throws exception ", e); - } finally { - releaseJDBCConnection(name); - } - return false; - } - - @SuppressWarnings("deprecation") - private static Connection getJDBCConnection(String name, boolean allocNew) { - String registryKey = "JDBC-Connection." + name; - OrderedPair info = (OrderedPair)getThreadStoreObject(registryKey); - if(info == null && allocNew) { - Session session = sessionFactory.openSession(); - Connection connection = session.connection(); - if(connection == null) { - session.close(); - return null; - } - - try { - connection.setAutoCommit(true); - } catch(SQLException e) { - logger.warn("Unexpected exception " + e.getMessage(), e); - try { - connection.close(); - session.close(); - } catch(Throwable ex) { - logger.warn("Unexpected exception " + e.getMessage(), e); - } - return null; - } - - registerThreadStoreObject(registryKey, new OrderedPair(session, connection)); - return connection; - } - - if(info != null) - return info.getSecond(); - - return null; - } - - private static void releaseJDBCConnection(String name) { - String registryKey = "JDBC-Connection." + name; - OrderedPair info = (OrderedPair)unregisterThreadStoreObject(registryKey); - if(info != null) { - try { - info.getSecond().close(); - info.getFirst().close(); - } catch(Throwable e) { - logger.warn("Unexpected exception " + e.getMessage(), e); - } - } - } - - public static void registerThreadStoreObject(String name, Object object) { - Map store = getThreadStore(); - store.put(name, object); - } - - public static Object getThreadStoreObject(String name) { - Map store = getThreadStore(); - return store.get(name); - } - - public static Object unregisterThreadStoreObject(String name) { - Map store = getThreadStore(); - if(store.containsKey(name)) { - Object value = store.get(name); - store.remove(name); - return value; - } - return null; - } - - private static Map getThreadStore() { - Map store = threadStore.get(); - if(store == null) { - store = new HashMap(); - threadStore.set(store); - } - return store; - } -} diff --git a/awsapi/src/com/cloud/bridge/persist/PersistException.java b/awsapi/src/com/cloud/bridge/persist/PersistException.java deleted file mode 100644 index 920cf40740e..00000000000 --- a/awsapi/src/com/cloud/bridge/persist/PersistException.java +++ /dev/null @@ -1,36 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
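With PersistContext and its implicit per-thread Hibernate session removed, transaction boundaries in the new DAO implementations are explicit, using com.cloud.utils.db.Transaction as the hunks below do. A minimal sketch of the two idioms that appear in this patch; the wrapper class and method names are placeholders.

    import com.cloud.utils.db.Transaction;

    class TransactionIdiomSketch {
        // Read path, as in BucketPolicyDaoImpl.getByName(): open, start, query, close.
        void readSketch() {
            Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
            try {
                txn.start();
                // build a SearchCriteria here and call findOneBy()/listBy()
            } finally {
                txn.close();
            }
        }

        // Write path, as in MHostDaoImpl.updateHeartBeat(): commit before closing.
        void writeSketch() {
            Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
            try {
                txn.start();
                // call update(id, vo) or remove(sc) here
                txn.commit();
            } finally {
                txn.close();
            }
        }
    }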
-package com.cloud.bridge.persist; - -public class PersistException extends RuntimeException { - private static final long serialVersionUID = -7137918292537610367L; - - public PersistException() { - } - - public PersistException(String message) { - super(message); - } - - public PersistException(Throwable e) { - super(e); - } - - public PersistException(String message, Throwable e) { - super(message, e); - } -} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDao.java b/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDao.java index dda0e2d7128..f23db439203 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDao.java @@ -1,159 +1,12 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Properties; +import com.cloud.bridge.model.BucketPolicyVO; +import com.cloud.utils.db.GenericDao; -import org.apache.log4j.Logger; +public interface BucketPolicyDao extends GenericDao { -import com.cloud.bridge.util.ConfigurationHelper; + void deletePolicy(String bucketName); -public class BucketPolicyDao { - public static final Logger logger = Logger.getLogger(BucketPolicyDao.class); + BucketPolicyVO getByName(String bucketName); - private Connection conn = null; - private String dbName = null; - private String dbUser = null; - private String dbPassword = null; - private String dbHost = null; - private String dbPort = null; - - public BucketPolicyDao() - { - File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties"); - Properties EC2Prop = null; - - if (null != propertiesFile) { - EC2Prop = new Properties(); - try { - EC2Prop.load( new FileInputStream( propertiesFile )); - } catch (FileNotFoundException e) { - logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e); - } catch (IOException e) { - logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e); - } - dbHost = EC2Prop.getProperty( "db.cloud.host" ); - dbName = EC2Prop.getProperty( "db.awsapi.name" ); - dbUser = EC2Prop.getProperty( "db.cloud.username" ); - dbPassword = EC2Prop.getProperty( "db.cloud.password" ); - dbPort = EC2Prop.getProperty( "db.cloud.port" ); - } - } - - public void addPolicy( String bucketName, String owner, String policy ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - - openConnection(); 
- try { - statement = conn.prepareStatement ( "INSERT INTO bucket_policies (BucketName, OwnerCanonicalID, Policy) VALUES (?,?,?)" ); - statement.setString( 1, bucketName ); - statement.setString( 2, owner ); - statement.setString( 3, policy ); - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - /** - * Since a bucket policy can exist before its bucket we also need to keep the policy's owner - * so we can restrict who modifies it (because of the "s3:CreateBucket" action). - */ - public String getPolicyOwner( String bucketName ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String owner = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT OwnerCanonicalID FROM bucket_policies WHERE BucketName=?" ); - statement.setString( 1, bucketName ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) owner = rs.getString( "OwnerCanonicalID" ); - statement.close(); - return owner; - - } finally { - closeConnection(); - } - } - - public String getPolicy( String bucketName ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String policy = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT Policy FROM bucket_policies WHERE BucketName=?" ); - statement.setString( 1, bucketName ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) policy = rs.getString( "Policy" ); - statement.close(); - return policy; - - } finally { - closeConnection(); - } - } - - public void deletePolicy( String bucketName ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "DELETE FROM bucket_policies WHERE BucketName=?" ); - statement.setString( 1, bucketName ); - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - private void openConnection() - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - if (null == conn) { - Class.forName( "com.mysql.jdbc.Driver" ).newInstance(); - conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName, dbUser, dbPassword ); - } - } - - private void closeConnection() throws SQLException { - if (null != conn) conn.close(); - conn = null; - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDaoImpl.java new file mode 100644 index 00000000000..41bf3117d0f --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/BucketPolicyDaoImpl.java @@ -0,0 +1,72 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package com.cloud.bridge.persist.dao; + + +import javax.ejb.Local; + +import org.apache.log4j.Logger; +import com.cloud.bridge.model.BucketPolicyVO; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={BucketPolicyDao.class}) +public class BucketPolicyDaoImpl extends GenericDaoBase implements BucketPolicyDao{ + public static final Logger logger = Logger.getLogger(BucketPolicyDaoImpl.class); + public BucketPolicyDaoImpl(){ } + + /** + * Since a bucket policy can exist before its bucket we also need to keep the policy's owner + * so we can restrict who modifies it (because of the "s3:CreateBucket" action). + */ + @Override + public BucketPolicyVO getByName( String bucketName ) { + SearchBuilder searchByBucket = createSearchBuilder(); + searchByBucket.and("BucketName", searchByBucket.entity().getBucketName(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = searchByBucket.create(); + sc.setParameters("BucketName", bucketName); + return findOneBy(sc); + + }finally { + txn.close(); + } + + } + + @Override + public void deletePolicy( String bucketName ) { + SearchBuilder deleteByBucket = createSearchBuilder(); + deleteByBucket.and("BucketName", deleteByBucket.entity().getBucketName(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = deleteByBucket.create(); + sc.setParameters("BucketName", bucketName); + remove(sc); + + }finally { + txn.close(); + } + + } +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDao.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDao.java index 31a5be873e7..bf8c97a2db7 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDao.java @@ -1,35 +1,11 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
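On the caller side, the hand-written JDBC in the old BucketPolicyDao (openConnection, prepared statements, closeConnection) reduces to calls on the DAO interface. A short usage sketch follows; the bucket name and the inline construction of the implementation are chosen only for illustration, since in the server the DAO is obtained through component wiring rather than new.

    import com.cloud.bridge.model.BucketPolicyVO;
    import com.cloud.bridge.persist.dao.BucketPolicyDao;
    import com.cloud.bridge.persist.dao.BucketPolicyDaoImpl;

    class BucketPolicyUsageSketch {
        void sketch() {
            // Illustration only; normally injected/located rather than constructed inline.
            BucketPolicyDao policyDao = new BucketPolicyDaoImpl();

            // Replaces the old getPolicy() SELECT; null means no policy stored for the bucket.
            BucketPolicyVO policy = policyDao.getByName("examplebucket");

            // Replaces the old hand-written DELETE statement.
            policyDao.deletePolicy("examplebucket");
        }
    }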
package com.cloud.bridge.persist.dao; -import org.apache.log4j.Logger; +import com.cloud.bridge.model.CloudStackAccountVO; +import com.cloud.utils.db.GenericDao; -import com.cloud.bridge.persist.EntityDao; -import com.cloud.stack.models.CloudStackAccount; +public interface CloudStackAccountDao extends + GenericDao { + String getDefaultZoneId(String accountId); + -public class CloudStackAccountDao extends EntityDao { - public static final Logger logger = Logger.getLogger(CloudStackAccountDao.class); - - public CloudStackAccountDao() { - super(CloudStackAccount.class, true); - } - - public CloudStackAccount getdefaultZoneId( String id ) { - return queryEntity("from CloudStackAccount where id=?", new Object[] {id}); - } } - diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDaoImpl.java new file mode 100644 index 00000000000..be3cd778e40 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackAccountDaoImpl.java @@ -0,0 +1,39 @@ +package com.cloud.bridge.persist.dao; + +import javax.ejb.Local; + +import com.cloud.bridge.model.CloudStackAccountVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={CloudStackAccountDao.class}) +public class CloudStackAccountDaoImpl extends GenericDaoBase implements CloudStackAccountDao { + + @Override + public String getDefaultZoneId(String accountId) { + + SearchBuilder SearchByUUID = createSearchBuilder(); + Transaction txn = Transaction.open(Transaction.CLOUD_DB); + try { + txn.start(); + SearchByUUID.and("uuid", SearchByUUID.entity().getUuid(), + SearchCriteria.Op.EQ); + SearchByUUID.done(); + SearchCriteria sc = SearchByUUID.create(); + sc.setParameters("uuid", accountId); + CloudStackAccountVO account = findOneBy(sc); + if (null != account) + if(null != account.getDefaultZoneId()) + return Long.toString(account.getDefaultZoneId()); + return null; + } finally { + txn.commit(); + txn.close(); + } + + } + + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDao.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDao.java index ed16974dbf1..8c2c1850247 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDao.java @@ -1,42 +1,9 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
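The new DAO implementations pick the schema they run against through the Transaction constants: account and service-offering lookups open the management-server database, the bridge's own tables (bucket policies, mhost and mount records) open the awsapi database, and some DAOs simply join the transaction already bound to the calling thread. A small sketch of the three forms; the groupings in the comments are inferred from the hunks in this patch.

    import com.cloud.utils.db.Transaction;

    class DatabaseSelectionSketch {
        void sketch() {
            // Management-server ("cloud") database, as in CloudStackAccountDaoImpl above.
            Transaction cloudTxn = Transaction.open(Transaction.CLOUD_DB);
            cloudTxn.close();

            // Bridge ("cloudbridge"/awsapi) database, as in BucketPolicyDaoImpl and MHostDaoImpl.
            Transaction awsapiTxn = Transaction.open(Transaction.AWSAPI_DB);
            awsapiTxn.close();

            // Join the transaction already on this thread, as in CloudStackConfigurationDaoImpl below.
            Transaction current = Transaction.currentTxn();
        }
    }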
package com.cloud.bridge.persist.dao; -import org.apache.log4j.Logger; - -import com.cloud.bridge.persist.EntityDao; -import com.cloud.stack.models.CloudStackConfiguration; - - -public class CloudStackConfigurationDao extends EntityDao { - public static final Logger logger = Logger.getLogger(CloudStackConfigurationDao.class); - - public CloudStackConfigurationDao() { - super(CloudStackConfiguration.class, true); - } - - - public String getConfigValue( String configName ){ - CloudStackConfiguration config = queryEntity("from CloudStackConfiguration where name=?", new Object[] {configName}); - if(config != null){ - return config.getValue(); - } - return null; - } +import com.cloud.bridge.model.CloudStackConfigurationVO; +import com.cloud.utils.db.GenericDao; +public interface CloudStackConfigurationDao extends GenericDao { + public String getConfigValue(String name); } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDaoImpl.java new file mode 100644 index 00000000000..1e7a70fdba2 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackConfigurationDaoImpl.java @@ -0,0 +1,45 @@ +package com.cloud.bridge.persist.dao; + +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.SQLException; + +import javax.ejb.Local; + +import org.apache.log4j.Logger; + +import com.cloud.bridge.model.CloudStackConfigurationVO; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + + +@Local(value={CloudStackConfigurationDao.class}) +public class CloudStackConfigurationDaoImpl extends GenericDaoBase implements CloudStackConfigurationDao { + private static final Logger s_logger = Logger.getLogger(CloudStackConfigurationDaoImpl.class); + + final SearchBuilder NameSearch= createSearchBuilder(); + + public CloudStackConfigurationDaoImpl() { } + + + @Override + @DB + public String getConfigValue(String name) { + NameSearch.and("name", NameSearch.entity().getName(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.currentTxn(); + try { + txn.start(); + SearchCriteria sc = NameSearch.create(); + sc.setParameters("name", name); + return findOneBy(sc).getValue(); + }finally { + + } + } + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDao.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDao.java index 5013eac88d7..4b0c9e16a56 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDao.java @@ -1,42 +1,13 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import org.apache.log4j.Logger; +import com.cloud.bridge.model.CloudStackServiceOfferingVO; +import com.cloud.utils.db.GenericDao; -import com.cloud.bridge.persist.EntityDao; -import com.cloud.stack.models.CloudStackConfiguration; -import com.cloud.stack.models.CloudStackServiceOffering; +public interface CloudStackSvcOfferingDao extends GenericDao{ + public CloudStackServiceOfferingVO getSvcOfferingByName(String name); -public class CloudStackSvcOfferingDao extends EntityDao { - public static final Logger logger = Logger.getLogger(CloudStackSvcOfferingDao.class); + public CloudStackServiceOfferingVO getSvcOfferingById(String id); - public CloudStackSvcOfferingDao() { - super(CloudStackServiceOffering.class, true); - } - - - public CloudStackServiceOffering getSvcOfferingByName( String name ){ - return queryEntity("from CloudStackServiceOffering where name=?", new Object[] {name}); - } - - public CloudStackServiceOffering getSvcOfferingById( String id ){ - return queryEntity("from CloudStackServiceOffering where id=?", new Object[] {id}); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDaoImpl.java new file mode 100644 index 00000000000..dca38912aa5 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/CloudStackSvcOfferingDaoImpl.java @@ -0,0 +1,75 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.persist.dao; + +import javax.ejb.Local; + +import org.apache.log4j.Logger; + +import com.cloud.bridge.model.CloudStackServiceOfferingVO; +import com.cloud.bridge.model.SHostVO; +import com.cloud.stack.models.CloudStackConfiguration; +import com.cloud.stack.models.CloudStackServiceOffering; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={CloudStackSvcOfferingDao.class}) +public class CloudStackSvcOfferingDaoImpl extends GenericDaoBase implements CloudStackSvcOfferingDao { + public static final Logger logger = Logger.getLogger(CloudStackSvcOfferingDaoImpl.class); + + public CloudStackSvcOfferingDaoImpl() { } + + @Override + public CloudStackServiceOfferingVO getSvcOfferingByName( String name ){ + SearchBuilder searchByName = createSearchBuilder(); + searchByName.and("name", searchByName.entity().getName(), SearchCriteria.Op.EQ); + searchByName.done(); + Transaction txn = Transaction.open(Transaction.CLOUD_DB); + try { + txn.start(); + SearchCriteria sc = searchByName.create(); + sc.setParameters("name", name); + return findOneBy(sc); + + }finally { + txn.close(); + } + + + } + @Override + public CloudStackServiceOfferingVO getSvcOfferingById( String id ){ + SearchBuilder searchByID = createSearchBuilder(); + searchByID.and("id", searchByID.entity().getName(), SearchCriteria.Op.EQ); + searchByID.done(); + Transaction txn = Transaction.open(Transaction.CLOUD_DB); + try { + txn.start(); + SearchCriteria sc = searchByID.create(); + sc.setParameters("id", id); + return findOneBy(sc); + + }finally { + txn.close(); + } + + + } + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MHostDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MHostDao.java index 72501311003..a4b65d757f2 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/MHostDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/MHostDao.java @@ -1,30 +1,12 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
package com.cloud.bridge.persist.dao; -import com.cloud.bridge.model.MHost; -import com.cloud.bridge.persist.EntityDao; +import com.cloud.bridge.model.MHostVO; +import com.cloud.utils.db.GenericDao; + +public interface MHostDao extends GenericDao { + + MHostVO getByHostKey(String hostKey); + + public void updateHeartBeat(MHostVO mhost); -public class MHostDao extends EntityDao { - public MHostDao() { - super(MHost.class); - } - - public MHost getByHostKey(String hostKey) { - return queryEntity("from MHost where hostKey=?", new Object[] {hostKey}); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MHostDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/MHostDaoImpl.java new file mode 100644 index 00000000000..aff6f810068 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MHostDaoImpl.java @@ -0,0 +1,61 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package com.cloud.bridge.persist.dao; +import javax.ejb.Local; + +import com.cloud.bridge.model.MHostVO; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={MHostDao.class}) +public class MHostDaoImpl extends GenericDaoBase implements MHostDao{ + final SearchBuilder NameSearch= createSearchBuilder(); + + public MHostDaoImpl() { + } + + @DB + @Override + public MHostVO getByHostKey(String hostKey) { + NameSearch.and("MHostKey", NameSearch.entity().getHostKey(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = NameSearch.create(); + sc.setParameters("MHostKey", hostKey); + return findOneBy(sc); + + }finally { + txn.close(); + } + } + + @Override + public void updateHeartBeat(MHostVO mhost) { + Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + update(mhost.getId(), mhost); + txn.commit(); + }finally { + txn.close(); + } + } +} \ No newline at end of file diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDao.java index efee98c3f65..7a02c4e884d 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDao.java @@ -1,30 +1,11 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import com.cloud.bridge.model.MHostMount; -import com.cloud.bridge.persist.EntityDao; +import com.cloud.bridge.model.MHostMountVO; +import com.cloud.utils.db.GenericDao; + +public interface MHostMountDao extends GenericDao { + + MHostMountVO getHostMount(long mHostId, long sHostId); + -public class MHostMountDao extends EntityDao { - public MHostMountDao() { - super(MHostMount.class); - } - - public MHostMount getHostMount(long mHostId, long sHostId) { - return queryEntity("from MHostMount where mhost=? and shost=?", new Object[] { mHostId, sHostId } ); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDaoImpl.java new file mode 100644 index 00000000000..4450da844a7 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MHostMountDaoImpl.java @@ -0,0 +1,48 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.persist.dao; + +import javax.ejb.Local; + +import com.cloud.bridge.model.MHostMountVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={MHostMountDao.class}) +public class MHostMountDaoImpl extends GenericDaoBase implements MHostMountDao { + final SearchBuilder SearchByMHostID = createSearchBuilder(); + public MHostMountDaoImpl() { + } + + @Override + public MHostMountVO getHostMount(long mHostId, long sHostId) { + SearchByMHostID.and("MHostID", SearchByMHostID.entity().getmHostID(), SearchCriteria.Op.EQ); + SearchByMHostID.and("SHostID", SearchByMHostID.entity().getsHostID(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByMHostID.create(); + sc.setParameters("MHostID", mHostId); + sc.setParameters("SHostID", sHostId); + return findOneBy(sc); + }finally { + txn.close(); + } + } +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDao.java new file mode 100644 index 00000000000..399e820731d --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDao.java @@ -0,0 +1,18 @@ +package com.cloud.bridge.persist.dao; + +import java.util.List; + +import com.cloud.bridge.model.MultiPartPartsVO; +import com.cloud.utils.db.GenericDao; + +public interface MultiPartPartsDao extends GenericDao { + + List getParts(int uploadId, int maxParts, int startAt); + + int getnumParts(int uploadId, int endMarker); + + MultiPartPartsVO findByUploadID(int uploadId, int partNumber); + + void updateParts(MultiPartPartsVO partVO, int uploadId, int partNumber); + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDaoImpl.java new file mode 100644 index 00000000000..91e43984d4d --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartPartsDaoImpl.java @@ -0,0 +1,101 @@ +package com.cloud.bridge.persist.dao; + +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.MultiPartPartsVO; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={MultiPartPartsDao.class}) +public class MultiPartPartsDaoImpl extends GenericDaoBase implements MultiPartPartsDao { + + @Override + public List getParts(int uploadId, int maxParts, int startAt ) { + + SearchBuilder ByUploadID = createSearchBuilder(); + ByUploadID.and("UploadID", ByUploadID.entity().getUploadid(), SearchCriteria.Op.EQ); + ByUploadID.and("partNumber", ByUploadID.entity().getPartNumber(), SearchCriteria.Op.GT); + ByUploadID.and("partNumber", ByUploadID.entity().getPartNumber(), SearchCriteria.Op.LT); + Filter filter = new Filter(MultiPartPartsVO.class, "partNumber", Boolean.TRUE, null, null); + + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = ByUploadID.create(); + sc.setParameters("UploadID", new Long(uploadId)); + sc.setParameters("partNumber", startAt); + sc.setParameters("partNumber", maxParts); + return listBy(sc, filter); + + } finally { + txn.close(); + } + } + + @Override + public int getnumParts( int uploadId, int endMarker ) { + 
SearchBuilder byUploadID = createSearchBuilder(); + byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ); + byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.GT); + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = byUploadID.create(); + sc.setParameters("UploadID", new Long(uploadId)); + sc.setParameters("partNumber", endMarker); + return listBy(sc).size(); + + } finally { + txn.close(); + } + + + } + + @Override + public MultiPartPartsVO findByUploadID(int uploadId, int partNumber) { + + SearchBuilder byUploadID = createSearchBuilder(); + byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ); + byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = byUploadID.create(); + sc.setParameters("UploadID", new Long(uploadId)); + sc.setParameters("partNumber", partNumber); + return findOneBy(sc); + + } finally { + txn.close(); + } + + } + + @Override + public void updateParts(MultiPartPartsVO partVO, int uploadId, int partNumber) { + + SearchBuilder byUploadID = createSearchBuilder(); + byUploadID.and("UploadID", byUploadID.entity().getUploadid(), SearchCriteria.Op.EQ); + byUploadID.and("partNumber", byUploadID.entity().getPartNumber(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = byUploadID.create(); + sc.setParameters("UploadID", new Long(uploadId)); + sc.setParameters("partNumber", partNumber); + update(partVO, sc); + txn.commit(); + + } finally { + txn.close(); + } + } + +} + diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDao.java new file mode 100644 index 00000000000..4c52958290b --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDao.java @@ -0,0 +1,21 @@ +package com.cloud.bridge.persist.dao; + +import java.util.List; + +import com.cloud.bridge.model.MultiPartUploadsVO; +import com.cloud.bridge.util.OrderedPair; +import com.cloud.utils.db.GenericDao; + +public interface MultiPartUploadsDao extends + GenericDao { + + OrderedPair multipartExits(int uploadId); + + void deleteUpload(int uploadId); + + String getAtrributeValue(String attribute, int uploadid); + + List getInitiatedUploads(String bucketName, + int maxParts, String prefix, String keyMarker, String uploadIdMarker); + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDaoImpl.java new file mode 100644 index 00000000000..b6ad611b28a --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultiPartUploadsDaoImpl.java @@ -0,0 +1,108 @@ +package com.cloud.bridge.persist.dao; + +import java.util.ArrayList; +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.MultiPartPartsVO; +import com.cloud.bridge.model.MultiPartUploadsVO; +import com.cloud.bridge.model.SBucketVO; +import com.cloud.bridge.util.OrderedPair; +import com.cloud.utils.db.Attribute; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import 
com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={MultiPartUploadsDao.class}) +public class MultiPartUploadsDaoImpl extends GenericDaoBase implements MultiPartUploadsDao { + + @Override + public OrderedPair multipartExits( int uploadId ) { + MultiPartUploadsVO uploadvo = null; + + Transaction txn = null; + try { + txn = Transaction.open(Transaction.AWSAPI_DB); + uploadvo = findById(new Long(uploadId)); + if (null != uploadvo) + return new OrderedPair(uploadvo.getAccessKey(), uploadvo.getNameKey()); + + return null; + } finally { + txn.close(); + } + } + + @Override + public void deleteUpload(int uploadId) { + + Transaction txn = null; + try { + txn = Transaction.open(Transaction.AWSAPI_DB); + remove(new Long(uploadId)); + txn.commit(); + }finally { + txn.close(); + } + } + + @Override + public String getAtrributeValue(String attribute, int uploadid) { + Transaction txn = null; + MultiPartUploadsVO uploadvo = null; + try { + txn = Transaction.open(Transaction.AWSAPI_DB); + uploadvo = findById(new Long(uploadid)); + if (null != uploadvo) { + if ( attribute.equalsIgnoreCase("AccessKey") ) + return uploadvo.getAccessKey(); + else if ( attribute.equalsIgnoreCase("x_amz_acl") ) + return uploadvo.getAmzAcl(); + } + return null; + } finally { + txn.close(); + } + } + + @Override + public List getInitiatedUploads(String bucketName, int maxParts, String prefix, String keyMarker, String uploadIdMarker) { + + List uploadList = new ArrayList(); + + SearchBuilder byBucket = createSearchBuilder(); + byBucket.and("BucketName", byBucket.entity().getBucketName() , SearchCriteria.Op.EQ); + + if (null != prefix) + byBucket.and("NameKey", byBucket.entity().getNameKey(), SearchCriteria.Op.LIKE); + if (null != uploadIdMarker) + byBucket.and("NameKey", byBucket.entity().getNameKey(), SearchCriteria.Op.GT); + if (null != uploadIdMarker) + byBucket.and("ID", byBucket.entity().getId(), SearchCriteria.Op.GT); + + Filter filter = new Filter(MultiPartUploadsVO.class, "nameKey", Boolean.TRUE, null, null); + filter.addOrderBy(MultiPartUploadsVO.class, "createTime", Boolean.TRUE); + + Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = byBucket.create(); + sc.setParameters("BucketName", bucketName); + if (null != prefix) + sc.setParameters("NameKey", prefix); + if (null != uploadIdMarker) + sc.setParameters("NameKey", keyMarker); + if (null != uploadIdMarker) + sc.setParameters("ID", uploadIdMarker); + listBy(sc, filter); + + }finally { + txn.close(); + } + return null; + } + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultipartLoadDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MultipartLoadDao.java index 0ab83ebcf69..c9b5ec75b5f 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/MultipartLoadDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultipartLoadDao.java @@ -34,42 +34,25 @@ import java.util.Properties; import org.apache.log4j.Logger; +import com.cloud.bridge.model.MultiPartPartsVO; +import com.cloud.bridge.model.MultiPartUploadsVO; +import com.cloud.bridge.model.MultipartMetaVO; import com.cloud.bridge.service.core.s3.S3MetaDataEntry; import com.cloud.bridge.service.core.s3.S3MultipartPart; import com.cloud.bridge.service.core.s3.S3MultipartUpload; import com.cloud.bridge.util.ConfigurationHelper; import com.cloud.bridge.util.OrderedPair; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.Transaction; public class MultipartLoadDao { public static 
final Logger logger = Logger.getLogger(MultipartLoadDao.class); - - private Connection conn = null; - private String dbName = null; - private String dbUser = null; - private String dbPassword = null; - private String dbHost = null; - private String dbPort = null; - public MultipartLoadDao() { - File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties"); - Properties EC2Prop = null; - - if (null != propertiesFile) { - EC2Prop = new Properties(); - try { - EC2Prop.load( new FileInputStream( propertiesFile )); - } catch (FileNotFoundException e) { - logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e); - } catch (IOException e) { - logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e); - } - dbHost = EC2Prop.getProperty( "db.cloud.host" ); - dbName = EC2Prop.getProperty( "db.awsapi.name" ); - dbUser = EC2Prop.getProperty( "db.cloud.username" ); - dbPassword = EC2Prop.getProperty( "db.cloud.password" ); - dbPort = EC2Prop.getProperty( "db.cloud.port" ); - } - } + protected final MultipartMetaDao mpartMetaDao = ComponentLocator.inject(MultipartMetaDaoImpl.class); + protected final MultiPartPartsDao mpartPartsDao = ComponentLocator.inject(MultiPartPartsDaoImpl.class); + protected final MultiPartUploadsDao mpartUploadDao = ComponentLocator.inject(MultiPartUploadsDaoImpl.class); + + public MultipartLoadDao() {} /** * If a multipart upload exists with the uploadId value then return the non-null creators @@ -77,30 +60,13 @@ public class MultipartLoadDao { * * @param uploadId * @return creator of the multipart upload, and NameKey of upload - * @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException */ + + public OrderedPair multipartExits( int uploadId ) throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - PreparedStatement statement = null; - String accessKey = null; - String nameKey = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT AccessKey, NameKey FROM multipart_uploads WHERE ID=?" ); - statement.setInt( 1, uploadId ); - ResultSet rs = statement.executeQuery(); - if ( rs.next()) { - accessKey = rs.getString( "AccessKey" ); - nameKey = rs.getString( "NameKey" ); - return new OrderedPair( accessKey, nameKey ); - } - else return null; - - } finally { - closeConnection(); - } + return mpartUploadDao.multipartExits(uploadId); } /** @@ -110,23 +76,9 @@ public class MultipartLoadDao { * * @param uploadId * - * @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException */ - public void deleteUpload( int uploadId ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "DELETE FROM multipart_uploads WHERE ID=?" 
); - statement.setInt( 1, uploadId ); - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } + public void deleteUpload( int uploadId ) { + mpartUploadDao.deleteUpload(uploadId); } /** @@ -134,26 +86,9 @@ public class MultipartLoadDao { * * @param uploadId * @return the access key value defining the initiator - * @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException */ - public String getInitiator( int uploadId ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String initiator = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT AccessKey FROM multipart_uploads WHERE ID=?" ); - statement.setInt( 1, uploadId ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) initiator = rs.getString( "AccessKey" ); - statement.close(); - return initiator; - - } finally { - closeConnection(); - } + public String getInitiator( int uploadId ) { + return mpartUploadDao.getAtrributeValue("AccessKey", uploadId); } /** @@ -165,47 +100,38 @@ public class MultipartLoadDao { * @param cannedAccess * * @return if positive its the uploadId to be returned to the client - * - * @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException + * */ - public int initiateUpload( String accessKey, String bucketName, String key, String cannedAccess, S3MetaDataEntry[] meta ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; + public int initiateUpload( String accessKey, String bucketName, String key, String cannedAccess, S3MetaDataEntry[] meta ) { int uploadId = -1; - - openConnection(); + Transaction txn = null; try { - Date tod = new Date(); - java.sql.Timestamp dateTime = new Timestamp( tod.getTime()); + txn = Transaction.open(Transaction.AWSAPI_DB); + Date tod = new Date(); + MultiPartUploadsVO uploadVO = new MultiPartUploadsVO(accessKey, + bucketName, key, cannedAccess, tod); + uploadVO = mpartUploadDao.persist(uploadVO); - statement = conn.prepareStatement ( "INSERT INTO multipart_uploads (AccessKey, BucketName, NameKey, x_amz_acl, CreateTime) VALUES (?,?,?,?,?)" ); - statement.setString( 1, accessKey ); - statement.setString( 2, bucketName ); - statement.setString( 3, key ); - statement.setString( 4, cannedAccess ); - statement.setTimestamp( 5, dateTime ); - int count = statement.executeUpdate(); - statement.close(); + if (null != uploadVO) { + uploadId = uploadVO.getId().intValue(); + if (null != meta) { + for (int i = 0; i < meta.length; i++) { + MultipartMetaVO mpartMeta = new MultipartMetaVO(); + mpartMeta.setUploadID(uploadId); + S3MetaDataEntry entry = meta[i]; + mpartMeta.setName(entry.getName()); + mpartMeta.setValue(entry.getValue()); + mpartMetaDao.persist(mpartMeta); + } + txn.commit(); + } + } - // -> we need the newly entered ID - statement = conn.prepareStatement ( "SELECT ID FROM multipart_uploads WHERE AccessKey=? AND BucketName=? AND NameKey=? AND CreateTime=?" 
); - statement.setString( 1, accessKey ); - statement.setString( 2, bucketName ); - statement.setString( 3, key ); - statement.setTimestamp( 4, dateTime ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) { - uploadId = rs.getInt( "ID" ); - saveMultipartMeta( uploadId, meta ); - } - statement.close(); return uploadId; - } finally { - closeConnection(); + txn.close(); } - } + } /** * Remember all the individual parts that make up the entire multipart upload so that once @@ -219,49 +145,28 @@ public class MultipartLoadDao { * @param size * @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException */ - public void savePart( int uploadId, int partNumber, String md5, String storedPath, int size ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - int id = -1; - int count = 0; - - openConnection(); + public void savePart( int uploadId, int partNumber, String md5, String storedPath, int size ) { + try { - Date tod = new Date(); - java.sql.Timestamp dateTime = new java.sql.Timestamp( tod.getTime()); + MultiPartPartsVO partVO = null; - // -> are we doing an update or an insert? (are we over writting an existing entry?) - statement = conn.prepareStatement ( "SELECT ID FROM multipart_parts WHERE UploadID=? AND partNumber=?" ); - statement.setInt( 1, uploadId ); - statement.setInt( 2, partNumber ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) id = rs.getInt( "ID" ); - statement.close(); + partVO = mpartPartsDao.findByUploadID(uploadId, partNumber); + // -> are we doing an update or an insert? (are we over writting an + // existing entry?) - if ( -1 == id ) - { - statement = conn.prepareStatement ( "INSERT INTO multipart_parts (UploadID, partNumber, MD5, StoredPath, StoredSize, CreateTime) VALUES (?,?,?,?,?,?)" ); - statement.setInt( 1, uploadId ); - statement.setInt( 2, partNumber ); - statement.setString( 3, md5 ); - statement.setString( 4, storedPath ); - statement.setInt( 5, size ); - statement.setTimestamp( 6, dateTime ); + if (null == partVO) { + MultiPartPartsVO part = new MultiPartPartsVO(uploadId, + partNumber, md5, storedPath, size, new Date()); + mpartPartsDao.persist(part); + } else { + partVO.setMd5(md5); + partVO.setStoredSize(new Long(size)); + partVO.setCreateTime(new Date()); + partVO.setUploadid(new Long(uploadId)); + partVO.setPartNumber(partNumber); + mpartPartsDao.updateParts(partVO, uploadId, partNumber); } - else - { statement = conn.prepareStatement ( "UPDATE multipart_parts SET MD5=?, StoredSize=?, CreateTime=? WHERE UploadId=? AND partNumber=?" ); - statement.setString( 1, md5 ); - statement.setInt( 2, size ); - statement.setTimestamp( 3, dateTime ); - statement.setInt( 4, uploadId ); - statement.setInt( 5, partNumber ); - } - count = statement.executeUpdate(); - statement.close(); - } finally { - closeConnection(); } } @@ -270,24 +175,8 @@ public class MultipartLoadDao { * @param uploadId * @return the value defined in the x-amz-acl header or null */ - public String getCannedAccess( int uploadId ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String access = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT x_amz_acl FROM multipart_uploads WHERE ID=?" 
); - statement.setInt( 1, uploadId ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) access = rs.getString( "x_amz_acl" ); - statement.close(); - return access; - - } finally { - closeConnection(); - } + public String getCannedAccess( int uploadId ) { + return mpartUploadDao.getAtrributeValue("x_amz_acl", uploadId); } /** @@ -302,31 +191,25 @@ public class MultipartLoadDao { throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { List metaList = new ArrayList(); - PreparedStatement statement = null; int count = 0; - - openConnection(); + List metaVO; try { - statement = conn.prepareStatement ( "SELECT Name, Value FROM multipart_meta WHERE UploadID=?" ); - statement.setInt( 1, uploadId ); - ResultSet rs = statement.executeQuery(); - - while (rs.next()) - { - S3MetaDataEntry oneMeta = new S3MetaDataEntry(); - oneMeta.setName( rs.getString( "Name" )); - oneMeta.setValue( rs.getString( "Value" )); - metaList.add( oneMeta ); - count++; - } - statement.close(); + + metaVO = mpartMetaDao.getByUploadID(uploadId); + for (MultipartMetaVO multipartMetaVO : metaVO) { + S3MetaDataEntry oneMeta = new S3MetaDataEntry(); + oneMeta.setName( multipartMetaVO.getName()); + oneMeta.setValue( multipartMetaVO.getValue()); + metaList.add( oneMeta ); + count++; + } if ( 0 == count ) - return null; + return null; else return metaList.toArray(new S3MetaDataEntry[0]); } finally { - closeConnection(); + } } @@ -346,52 +229,33 @@ public class MultipartLoadDao { throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { S3MultipartUpload[] inProgress = new S3MultipartUpload[maxParts]; - PreparedStatement statement = null; boolean isTruncated = false; int i = 0; int pos = 1; - + List uploadList; // -> SQL like condition requires the '%' as a wildcard marker if (null != prefix) prefix = prefix + "%"; - StringBuffer queryStr = new StringBuffer(); - queryStr.append( "SELECT ID, AccessKey, NameKey, CreateTime FROM multipart_uploads WHERE BucketName=? " ); - if (null != prefix ) queryStr.append( "AND NameKey like ? " ); - if (null != keyMarker ) queryStr.append( "AND NameKey > ? "); - if (null != uploadIdMarker) queryStr.append( "AND ID > ? 
" ); - queryStr.append( "ORDER BY NameKey, CreateTime" ); - - openConnection(); - try { - statement = conn.prepareStatement ( queryStr.toString()); - statement.setString( pos++, bucketName ); - if (null != prefix ) statement.setString( pos++, prefix ); - if (null != keyMarker ) statement.setString( pos++, keyMarker ); - if (null != uploadIdMarker) statement.setString( pos, uploadIdMarker ); - ResultSet rs = statement.executeQuery(); - - while (rs.next() && i < maxParts) - { - Calendar tod = Calendar.getInstance(); - tod.setTime( rs.getTimestamp( "CreateTime" )); - inProgress[i] = new S3MultipartUpload(); - inProgress[i].setId( rs.getInt( "ID" )); - inProgress[i].setAccessKey( rs.getString( "AccessKey" )); - inProgress[i].setLastModified( tod ); - inProgress[i].setBucketName( bucketName ); - inProgress[i].setKey( rs.getString( "NameKey" )); - i++; - } - - if (rs.next()) isTruncated = true; - statement.close(); - - if (i < maxParts) inProgress = (S3MultipartUpload[])resizeArray(inProgress,i); - return new OrderedPair(inProgress, isTruncated); - - } finally { - closeConnection(); + try { + uploadList = mpartUploadDao.getInitiatedUploads(bucketName, maxParts, prefix, keyMarker, uploadIdMarker); + for (MultiPartUploadsVO uploadsVO : uploadList) { + Calendar tod = Calendar.getInstance(); + tod.setTime(uploadsVO.getCreateTime()); + inProgress[i] = new S3MultipartUpload(); + inProgress[i].setId( uploadsVO.getId().intValue()); + inProgress[i].setAccessKey(uploadsVO.getAccessKey()); + inProgress[i].setLastModified( tod ); + inProgress[i].setBucketName( bucketName ); + inProgress[i].setKey(uploadsVO.getNameKey()); + i++; + } + + if (i < maxParts) + inProgress = (S3MultipartUpload[]) resizeArray(inProgress, i); + return new OrderedPair(inProgress, + isTruncated); + }finally { } } @@ -411,41 +275,30 @@ public class MultipartLoadDao { throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { S3MultipartPart[] parts = new S3MultipartPart[maxParts]; - PreparedStatement statement = null; int i = 0; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT partNumber, MD5, StoredSize, StoredPath, CreateTime " + - "FROM multipart_parts " + - "WHERE UploadID=? " + - "AND partNumber > ? AND partNumber < ? 
" + - "ORDER BY partNumber" ); - statement.setInt( 1, uploadId ); - statement.setInt( 2, startAt ); - statement.setInt( 3, startAt + maxParts + 1 ); - ResultSet rs = statement.executeQuery(); - - while (rs.next() && i < maxParts) - { - Calendar tod = Calendar.getInstance(); - tod.setTime( rs.getTimestamp( "CreateTime" )); - - parts[i] = new S3MultipartPart(); - parts[i].setPartNumber( rs.getInt( "partNumber" )); - parts[i].setEtag( rs.getString( "MD5" ).toLowerCase()); - parts[i].setLastModified( tod ); - parts[i].setSize( rs.getInt( "StoredSize" )); - parts[i].setPath( rs.getString( "StoredPath" )); - i++; - } - statement.close(); - + List partsVO; + try { + + partsVO = mpartPartsDao.getParts(uploadId, startAt + maxParts + 1, startAt); + + for (MultiPartPartsVO partVO : partsVO) { + Calendar tod = Calendar.getInstance(); + tod.setTime(partVO.getCreateTime()); + + parts[i] = new S3MultipartPart(); + parts[i].setPartNumber(partVO.getPartNumber()); + parts[i].setEtag(partVO.getMd5()); + parts[i].setLastModified(tod); + parts[i].setSize(partVO.getStoredSize().intValue()); + parts[i].setPath(partVO.getStoredPath()); + i++; + } + if (i < maxParts) parts = (S3MultipartPart[])resizeArray(parts,i); return parts; } finally { - closeConnection(); + } } @@ -457,25 +310,8 @@ public class MultipartLoadDao { * @return number of parts with partNumber greater than endMarker * @throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException */ - public int numParts( int uploadId, int endMarker ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - int count = 0; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT count(*) FROM multipart_parts WHERE UploadID=? AND partNumber > ?" 
); - statement.setInt( 1, uploadId ); - statement.setInt( 2, endMarker ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) count = rs.getInt( 1 ); - statement.close(); - return count; - - } finally { - closeConnection(); - } + public int numParts( int uploadId, int endMarker ) { + return mpartPartsDao.getnumParts(uploadId, endMarker); } /** @@ -485,46 +321,30 @@ public class MultipartLoadDao { * @param uploadId - defines an in-process multipart upload * @param meta - an array of meta data to be assocated with the uploadId value * - * @throws SQLException, ClassNotFoundException, IllegalAccessException, InstantiationException */ - private void saveMultipartMeta( int uploadId, S3MetaDataEntry[] meta ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - if (null == meta) return; - PreparedStatement statement = null; + private void saveMultipartMeta( int uploadId, S3MetaDataEntry[] meta ) { + if (null == meta) return; - openConnection(); + Transaction txn = null; try { + txn = Transaction.open(Transaction.AWSAPI_DB); for( int i=0; i < meta.length; i++ ) { S3MetaDataEntry entry = meta[i]; - statement = conn.prepareStatement ( "INSERT INTO multipart_meta (UploadID, Name, Value) VALUES (?,?,?)" ); - statement.setInt( 1, uploadId ); - statement.setString( 2, entry.getName()); - statement.setString( 3, entry.getValue()); - int count = statement.executeUpdate(); - statement.close(); + MultipartMetaVO metaVO = new MultipartMetaVO(); + metaVO.setUploadID(uploadId); + metaVO.setName(entry.getName()); + metaVO.setValue(entry.getValue()); + metaVO=mpartMetaDao.persist(metaVO); } - + txn.commit(); } finally { - closeConnection(); + txn.close(); } } - private void openConnection() - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - if (null == conn) { - Class.forName( "com.mysql.jdbc.Driver" ).newInstance(); - conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName, dbUser, dbPassword ); - } - } - private void closeConnection() throws SQLException { - if (null != conn) conn.close(); - conn = null; - } - - /** + /** * Reallocates an array with a new size, and copies the contents * of the old array to the new array. 
* diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDao.java b/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDao.java new file mode 100644 index 00000000000..449ce442dc6 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDao.java @@ -0,0 +1,12 @@ +package com.cloud.bridge.persist.dao; + +import java.util.List; + +import com.cloud.bridge.model.MultipartMetaVO; +import com.cloud.utils.db.GenericDao; + +public interface MultipartMetaDao extends GenericDao { + + List getByUploadID(long uploadID); + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDaoImpl.java new file mode 100644 index 00000000000..cfe56c0dc08 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/MultipartMetaDaoImpl.java @@ -0,0 +1,34 @@ +package com.cloud.bridge.persist.dao; + +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.MultipartMetaVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={MultipartMetaDao.class}) +public class MultipartMetaDaoImpl extends GenericDaoBase implements MultipartMetaDao { + + @Override + public List getByUploadID (long uploadID) { + SearchBuilder searchByUID = createSearchBuilder(); + searchByUID.and("UploadID", searchByUID.entity().getUploadID(), SearchCriteria.Op.EQ); + searchByUID.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = searchByUID.create(); + sc.setParameters("UploadID", uploadID); + return listBy(sc); + + }finally { + txn.close(); + } + + } +} + diff --git a/awsapi/src/com/cloud/bridge/persist/dao/OfferingDao.java b/awsapi/src/com/cloud/bridge/persist/dao/OfferingDao.java index 358ce3d797f..c46b015deb7 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/OfferingDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/OfferingDao.java @@ -1,169 +1,18 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
package com.cloud.bridge.persist.dao; -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.sql.Connection; -import java.sql.DriverManager; -import java.sql.PreparedStatement; -import java.sql.ResultSet; -import java.sql.SQLException; -import java.util.Properties; +import com.cloud.bridge.model.OfferingBundleVO; +import com.cloud.utils.db.GenericDao; -import org.apache.log4j.Logger; +public interface OfferingDao extends GenericDao { -import com.cloud.bridge.util.ConfigurationHelper; + int getOfferingCount(); + String getCloudOffering(String amazonEC2Offering); -public class OfferingDao extends BaseDao { - public static final Logger logger = Logger.getLogger(OfferingDao.class); + String getAmazonOffering(String cloudStackOffering); - private Connection conn = null; - - public OfferingDao() - { - } - - public int getOfferingCount() - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - int result = 0; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT count(*) FROM offering_bundle" ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) result = rs.getInt(1); - statement.close(); - return result; - } finally { - closeConnection(); - } - } - - public String getCloudOffering( String amazonEC2Offering ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String result = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT CloudStackOffering FROM offering_bundle WHERE AmazonEC2Offering=?" ); - statement.setString( 1, amazonEC2Offering ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) result = rs.getString( "CloudStackOffering" ); - statement.close(); - return result; - - } finally { - closeConnection(); - } - } - - public String getAmazonOffering( String cloudStackOffering ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - String result = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "SELECT AmazonEC2Offering FROM offering_bundle WHERE CloudStackOffering=?" ); - statement.setString( 1, cloudStackOffering ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) result = rs.getString( "AmazonEC2Offering" ); - statement.close(); - return result; - - } finally { - closeConnection(); - } - } - - public void setOfferMapping( String amazonEC2Offering, String cloudStackOffering ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - int id = -1; - int count = 0; + void setOfferMapping(String amazonEC2Offering, String cloudStackOffering); - openConnection(); - try { - // -> are we doing an update or an insert? (are we over writing an existing entry?) - statement = conn.prepareStatement ( "SELECT ID FROM offering_bundle WHERE AmazonEC2Offering=?" 
); - statement.setString( 1, amazonEC2Offering ); - ResultSet rs = statement.executeQuery(); - if (rs.next()) id = rs.getInt( "ID" ); - statement.close(); + void deleteOfferMapping(String amazonEC2Offering); - if ( -1 == id ) - { - statement = conn.prepareStatement ( "INSERT INTO offering_bundle (AmazonEC2Offering, CloudStackOffering) VALUES (?,?)" ); - statement.setString( 1, amazonEC2Offering ); - statement.setString( 2, cloudStackOffering ); - } - else - { statement = conn.prepareStatement ( "UPDATE offering_bundle SET CloudStackOffering=? WHERE AmazonEC2Offering=?" ); - statement.setString( 1, cloudStackOffering ); - statement.setString( 2, amazonEC2Offering ); - } - count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - public void deleteOfferMapping( String amazonEC2Offering ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - PreparedStatement statement = null; - - openConnection(); - try { - statement = conn.prepareStatement ( "DELETE FROM offering_bundle WHERE AmazonEC2Offering=?" ); - statement.setString( 1, amazonEC2Offering ); - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - private void openConnection() - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException - { - if (null == conn) { - Class.forName( "com.mysql.jdbc.Driver" ).newInstance(); - conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + awsapi_dbName, dbUser, dbPassword ); - } - } - - private void closeConnection() throws SQLException - { - if (null != conn) conn.close(); - conn = null; - } } - diff --git a/awsapi/src/com/cloud/bridge/persist/dao/OfferingDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/OfferingDaoImpl.java new file mode 100644 index 00000000000..5a9a6250665 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/OfferingDaoImpl.java @@ -0,0 +1,135 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.persist.dao; + + +import javax.ejb.Local; +import javax.persistence.Entity; +import javax.persistence.Table; + +import org.apache.log4j.Logger; + +import com.cloud.bridge.model.OfferingBundleVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={OfferingDao.class}) +public class OfferingDaoImpl extends GenericDaoBase implements OfferingDao { + public static final Logger logger = Logger.getLogger(OfferingDaoImpl.class); + + public OfferingDaoImpl() {} + + @Override + public int getOfferingCount() { + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + return listAll().size(); + }finally { + txn.close(); + } + + } + + @Override + public String getCloudOffering( String amazonEC2Offering ) { + + SearchBuilder searchByAmazon = createSearchBuilder(); + searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ); + searchByAmazon.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = searchByAmazon.create(); + sc.setParameters("AmazonEC2Offering", amazonEC2Offering); + return findOneBy(sc).getCloudstackOffering(); + + } finally { + txn.close(); + } + } + + @Override + public String getAmazonOffering( String cloudStackOffering ) { + + SearchBuilder searchByAmazon = createSearchBuilder(); + searchByAmazon.and("CloudStackOffering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ); + searchByAmazon.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = searchByAmazon.create(); + sc.setParameters("CloudStackOffering", cloudStackOffering); + return findOneBy(sc).getAmazonOffering(); + + } finally { + txn.close(); + } + } + + @Override + public void setOfferMapping( String amazonEC2Offering, String cloudStackOffering ) { + + SearchBuilder searchByAmazon = createSearchBuilder(); + searchByAmazon.and("CloudStackOffering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ); + searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getCloudstackOffering() , SearchCriteria.Op.EQ); + searchByAmazon.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + OfferingBundleVO offering = null; + try { + txn.start(); + SearchCriteria sc = searchByAmazon.create(); + sc.setParameters("CloudStackOffering", cloudStackOffering); + sc.setParameters("AmazonEC2Offering", amazonEC2Offering); + offering = findOneBy(sc); + if (null == offering) { + offering = new OfferingBundleVO(); + } + offering.setAmazonOffering(amazonEC2Offering); + offering.setCloudstackOffering(cloudStackOffering); + if (null == offering) + offering = persist(offering); + else + update(offering.getID(), offering); + + txn.commit(); + } finally { + txn.close(); + } + + } + + @Override + public void deleteOfferMapping( String amazonEC2Offering ) { + SearchBuilder searchByAmazon = createSearchBuilder(); + searchByAmazon.and("AmazonEC2Offering", searchByAmazon.entity().getAmazonOffering() , SearchCriteria.Op.EQ); + searchByAmazon.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = searchByAmazon.create(); + sc.setParameters("AmazonEC2Offering", amazonEC2Offering); + remove(sc); + txn.commit(); + } finally { + txn.close(); + } + } + +} diff --git 
a/awsapi/src/com/cloud/bridge/persist/dao/SAclDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SAclDao.java index d62ae5b9304..5a5be6e856a 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/SAclDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SAclDao.java @@ -1,76 +1,21 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import java.util.Date; import java.util.List; import com.cloud.bridge.model.SAcl; -import com.cloud.bridge.persist.EntityDao; -import com.cloud.bridge.persist.PersistContext; +import com.cloud.bridge.model.SAclVO; import com.cloud.bridge.service.core.s3.S3AccessControlList; import com.cloud.bridge.service.core.s3.S3Grant; +import com.cloud.utils.db.GenericDao; -public class SAclDao extends EntityDao { - - public SAclDao() { - super(SAcl.class); - } - - public List listGrants(String target, long targetId) { - return queryEntities("from SAcl where target=? and targetId=? order by grantOrder asc", - new Object[] { target, new Long(targetId)}); - } +public interface SAclDao extends GenericDao { - public List listGrants(String target, long targetId, String userCanonicalId) { - return queryEntities("from SAcl where target=? and targetId=? and granteeCanonicalId=? order by grantOrder asc", - new Object[] { target, new Long(targetId), userCanonicalId }); - } + List listGrants(String target, long targetId, String userCanonicalId); + + void save(String target, long targetId, S3AccessControlList acl); + + SAcl save(String target, long targetId, S3Grant grant, int grantOrder); + + List listGrants(String target, long targetId); - public void save(String target, long targetId, S3AccessControlList acl) { - // -> the target's ACLs are being redefined - executeUpdate("delete from SAcl where target=? 
and targetId=?", new Object[] { target, new Long(targetId)}); - - if(acl != null) { - S3Grant[] grants = acl.getGrants(); - if(grants != null && grants.length > 0) { - int grantOrder = 1; - for(S3Grant grant : grants) { - save(target, targetId, grant, grantOrder++); - } - } - } - } - - public SAcl save(String target, long targetId, S3Grant grant, int grantOrder) { - SAcl aclEntry = new SAcl(); - aclEntry.setTarget(target); - aclEntry.setTargetId(targetId); - aclEntry.setGrantOrder(grantOrder); - - int grantee = grant.getGrantee(); - aclEntry.setGranteeType(grantee); - aclEntry.setPermission(grant.getPermission()); - aclEntry.setGranteeCanonicalId(grant.getCanonicalUserID()); - - Date ts = new Date(); - aclEntry.setCreateTime(ts); - aclEntry.setLastModifiedTime(ts); - PersistContext.getSession().save(aclEntry); - return aclEntry; - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SAclDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SAclDaoImpl.java new file mode 100644 index 00000000000..c73c1db02b1 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SAclDaoImpl.java @@ -0,0 +1,127 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.persist.dao; + +import java.util.Date; +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SAcl; +import com.cloud.bridge.model.SAclVO; +import com.cloud.bridge.service.core.s3.S3AccessControlList; +import com.cloud.bridge.service.core.s3.S3Grant; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SAclDao.class}) +public class SAclDaoImpl extends GenericDaoBase implements SAclDao { + + public SAclDaoImpl() {} + + @Override + public List listGrants(String target, long targetId) { + SearchBuilder SearchByTarget = createSearchBuilder(); + SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ); + SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ); + SearchByTarget.done(); + Filter filter = new Filter(SAclVO.class, "grantOrder", Boolean.TRUE, null, null); + Transaction txn = Transaction.open( Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByTarget.create(); + sc.setParameters("Target", target); + sc.setParameters("TargetID", targetId); + return listBy(sc, filter); + + } finally { + txn.close(); + } + } + + @Override + public List listGrants(String target, long targetId, String userCanonicalId) { + SearchBuilder SearchByAcl = createSearchBuilder(); + SearchByAcl.and("Target", SearchByAcl.entity().getTarget(), SearchCriteria.Op.EQ); + SearchByAcl.and("TargetID", SearchByAcl.entity().getTargetId(), SearchCriteria.Op.EQ); + SearchByAcl.and("GranteeCanonicalID", SearchByAcl.entity().getGranteeCanonicalId(), SearchCriteria.Op.EQ); + Filter filter = new Filter(SAclVO.class, "grantOrder", Boolean.TRUE, null, null); + Transaction txn = Transaction.open( Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByAcl.create(); + sc.setParameters("Target", target); + sc.setParameters("TargetID", targetId); + sc.setParameters("GranteeCanonicalID", userCanonicalId); + return listBy(sc, filter); + } finally { + txn.close(); + } + } + + @Override + public void save(String target, long targetId, S3AccessControlList acl) { + SearchBuilder SearchByTarget = createSearchBuilder(); + SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ); + SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ); + + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByTarget.create(); + sc.setParameters("Target", target); + sc.setParameters("TargetID", targetId); + this.remove(sc); + if(acl != null) { + S3Grant[] grants = acl.getGrants(); + if(grants != null && grants.length > 0) { + int grantOrder = 1; + for(S3Grant grant : grants) { + save(target, targetId, grant, grantOrder++); + } + } + } + txn.commit(); + } finally { + txn.close(); + } + + + } + + @Override + public SAcl save(String target, long targetId, S3Grant grant, int grantOrder) { + SAclVO aclEntry = new SAclVO(); + aclEntry.setTarget(target); + aclEntry.setTargetId(targetId); + aclEntry.setGrantOrder(grantOrder); + + int grantee = grant.getGrantee(); + aclEntry.setGranteeType(grantee); + aclEntry.setPermission(grant.getPermission()); + aclEntry.setGranteeCanonicalId(grant.getCanonicalUserID()); + + Date ts = new Date(); + aclEntry.setCreateTime(ts); + aclEntry.setLastModifiedTime(ts); + aclEntry = 
this.persist(aclEntry); + return aclEntry; + } +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SBucketDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SBucketDao.java index 833bf6a7271..0f4220019d1 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/SBucketDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SBucketDao.java @@ -1,37 +1,14 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; import java.util.List; -import com.cloud.bridge.model.SBucket; -import com.cloud.bridge.persist.EntityDao; +import com.cloud.bridge.model.SBucketVO; +import com.cloud.utils.db.GenericDao; -public class SBucketDao extends EntityDao { - public SBucketDao() { - super(SBucket.class); - } +public interface SBucketDao extends GenericDao { + + SBucketVO getByName(String bucketName); + + List listBuckets(String canonicalId); - public SBucket getByName(String bucketName) { - return queryEntity("from SBucket where name=?", new Object[] {bucketName}); - } - - public List listBuckets(String canonicalId) { - return queryEntities("from SBucket where ownerCanonicalId=? order by createTime asc", - new Object[] {canonicalId}); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SBucketDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SBucketDaoImpl.java new file mode 100644 index 00000000000..53e9e07fb1d --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SBucketDaoImpl.java @@ -0,0 +1,72 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+package com.cloud.bridge.persist.dao; + +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SBucket; +import com.cloud.bridge.model.SBucketVO; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SBucketDao.class}) +public class SBucketDaoImpl extends GenericDaoBase implements SBucketDao { + + public SBucketDaoImpl() { + } + + @Override + public SBucketVO getByName(String bucketName) { + SearchBuilder SearchByName = createSearchBuilder(); + SearchByName.and("Name", SearchByName.entity().getName(), SearchCriteria.Op.EQ); + //Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = SearchByName.create(); + sc.setParameters("Name", bucketName); + return findOneBy(sc); + + }finally { + txn.close(); + } + } + + @Override + public List listBuckets(String canonicalId) { + SearchBuilder ByCanonicalID = createSearchBuilder(); + ByCanonicalID.and("OwnerCanonicalID", ByCanonicalID.entity().getOwnerCanonicalId(), SearchCriteria.Op.EQ); + Filter filter = new Filter(SBucketVO.class, "createTime", Boolean.TRUE, null, null); + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = ByCanonicalID.create(); + sc.setParameters("OwnerCanonicalID", canonicalId); + return listBy(sc, filter); + }finally { + txn.close(); + } + + } + + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SHostDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SHostDao.java index 1108bcbb037..fc8865c90f5 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/SHostDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SHostDao.java @@ -1,35 +1,12 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import com.cloud.bridge.model.SHost; -import com.cloud.bridge.persist.EntityDao; +import com.cloud.bridge.model.SHostVO; +import com.cloud.utils.db.GenericDao; + +public interface SHostDao extends GenericDao { + + SHostVO getByHost(String host); + + SHostVO getLocalStorageHost(long mhostId, String storageRoot); -public class SHostDao extends EntityDao { - public SHostDao() { - super(SHost.class); - } - - public SHost getByHost(String host) { - return queryEntity("from SHost where host=?", new Object[] { host }); - } - - public SHost getLocalStorageHost(long mhostId, String storageRoot) { - return queryEntity("from SHost where mhost=? 
and exportRoot=?", - new Object[] { new Long(mhostId), storageRoot}); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SHostDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SHostDaoImpl.java new file mode 100644 index 00000000000..efe9500d9b9 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SHostDaoImpl.java @@ -0,0 +1,67 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. +package com.cloud.bridge.persist.dao; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SHostVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SHostDao.class}) +public class SHostDaoImpl extends GenericDaoBase implements SHostDao { + public SHostDaoImpl() {} + + @Override + public SHostVO getByHost(String host) { + SearchBuilder HostSearch = createSearchBuilder(); + HostSearch.and("Host", HostSearch.entity().getHost(), SearchCriteria.Op.EQ); + HostSearch.done(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = HostSearch.create(); + sc.setParameters("Host", host); + return findOneBy(sc); + + }finally { + txn.close(); + } + + } + + @Override + public SHostVO getLocalStorageHost(long mhostId, String storageRoot) { + SearchBuilder LocalStorageHostSearch = createSearchBuilder(); + LocalStorageHostSearch.and("MHostID", LocalStorageHostSearch.entity().getMhostid(), SearchCriteria.Op.EQ); + LocalStorageHostSearch.and("ExportRoot", LocalStorageHostSearch.entity().getExportRoot(), SearchCriteria.Op.EQ); + LocalStorageHostSearch.done(); + Transaction txn = Transaction.currentTxn(); + try { + txn.start(); + SearchCriteria sc = LocalStorageHostSearch.create(); + sc.setParameters("MHostID", mhostId); + sc.setParameters("ExportRoot", storageRoot); + return findOneBy(sc); + + }finally { + txn.close(); + } + } +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SMetaDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SMetaDao.java index b781bbb6d4f..225138e6351 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/SMetaDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SMetaDao.java @@ -1,55 +1,17 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; import java.util.List; -import com.cloud.bridge.model.SMeta; -import com.cloud.bridge.persist.EntityDao; -import com.cloud.bridge.persist.PersistContext; +import com.cloud.bridge.model.SMetaVO; import com.cloud.bridge.service.core.s3.S3MetaDataEntry; +import com.cloud.utils.db.GenericDao; -public class SMetaDao extends EntityDao { - public SMetaDao() { - super(SMeta.class); - } - - public List getByTarget(String target, long targetId) { - return queryEntities("from SMeta where target=? and targetId=?", new Object[] {target, targetId}); - } +public interface SMetaDao extends GenericDao { - public SMeta save(String target, long targetId, S3MetaDataEntry entry) { - SMeta meta = new SMeta(); - meta.setTarget(target); - meta.setTargetId(targetId); - meta.setName(entry.getName()); - meta.setValue(entry.getValue()); - - PersistContext.getSession().save(meta); - return meta; - } - - public void save(String target, long targetId, S3MetaDataEntry[] entries) { - // To redefine the target's metadaa - executeUpdate("delete from SMeta where target=? and targetId=?", new Object[] { target, new Long(targetId)}); + List getByTarget(String target, long targetId); + + SMetaVO save(String target, long targetId, S3MetaDataEntry entry); + + void save(String target, long targetId, S3MetaDataEntry[] entries); - if(entries != null) { - for(S3MetaDataEntry entry : entries) - save(target, targetId, entry); - } - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SMetaDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SMetaDaoImpl.java new file mode 100644 index 00000000000..e0555ecd42f --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SMetaDaoImpl.java @@ -0,0 +1,88 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
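The same angle-bracket stripping affects the SearchBuilder and SearchCriteria declarations in the new Impl classes above (SBucketDaoImpl, SHostDaoImpl) and below. The lookup idiom that replaces the old Hibernate queryEntity("from X where ...") calls is, with the generics restored, roughly this sketch (type parameters assumed):

    // Sketch of the GenericDao query pattern used throughout the new *DaoImpl
    // classes; <SBucketVO> is assumed wherever the rendering dropped generics.
    @Override
    public SBucketVO getByName(String bucketName) {
        SearchBuilder<SBucketVO> searchByName = createSearchBuilder();
        searchByName.and("Name", searchByName.entity().getName(), SearchCriteria.Op.EQ);
        searchByName.done();

        Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true);
        try {
            txn.start();
            SearchCriteria<SBucketVO> sc = searchByName.create();
            sc.setParameters("Name", bucketName);
            return findOneBy(sc);   // inherited from GenericDaoBase<SBucketVO, Long>
        } finally {
            txn.close();
        }
    }
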
+package com.cloud.bridge.persist.dao; + +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SMetaVO; +import com.cloud.bridge.service.core.s3.S3MetaDataEntry; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SMetaDao.class}) +public class SMetaDaoImpl extends GenericDaoBase implements SMetaDao { + + public SMetaDaoImpl() {} + + @Override + public List getByTarget(String target, long targetId) { + SearchBuilder SearchByTarget = createSearchBuilder(); + SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ); + SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ); + SearchByTarget.done(); + Transaction txn = Transaction.open( Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByTarget.create(); + sc.setParameters("Target", target); + sc.setParameters("TargetID", targetId); + return listBy(sc); + } finally { + txn.close(); + } + + } + + @Override + public SMetaVO save(String target, long targetId, S3MetaDataEntry entry) { + SMetaVO meta = new SMetaVO(); + meta.setTarget(target); + meta.setTargetId(targetId); + meta.setName(entry.getName()); + meta.setValue(entry.getValue()); + meta = this.persist(meta); + return meta; + } + + @Override + public void save(String target, long targetId, S3MetaDataEntry[] entries) { + // To redefine the target's metadaa + SearchBuilder SearchByTarget = createSearchBuilder(); + SearchByTarget.and("Target", SearchByTarget.entity().getTarget(), SearchCriteria.Op.EQ); + SearchByTarget.and("TargetID", SearchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByTarget.create(); + sc.setParameters("Target", target); + sc.setParameters("TargetID", targetId); + this.remove(sc); + + if(entries != null) { + for(S3MetaDataEntry entry : entries) + save(target, targetId, entry); + } + txn.commit(); + }finally { + txn.close(); + } + } +} \ No newline at end of file diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SObjectDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SObjectDao.java index 3d87cbe5036..42fcd02260d 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/SObjectDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SObjectDao.java @@ -1,76 +1,19 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
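In SMetaDaoImpl above, the Hibernate bulk delete ("delete from SMeta where target=? and targetId=?") becomes a SearchCriteria-based remove followed by per-entry persists inside a single AWSAPI_DB transaction. A condensed sketch of that replace-all-metadata path, with the stripped generics restored (assumed):

    // Sketch of SMetaDaoImpl.save(target, targetId, entries): delete the
    // existing rows for the target, then persist one SMetaVO per entry.
    SearchBuilder<SMetaVO> searchByTarget = createSearchBuilder();
    searchByTarget.and("Target", searchByTarget.entity().getTarget(), SearchCriteria.Op.EQ);
    searchByTarget.and("TargetID", searchByTarget.entity().getTargetId(), SearchCriteria.Op.EQ);

    Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
    try {
        txn.start();
        SearchCriteria<SMetaVO> sc = searchByTarget.create();
        sc.setParameters("Target", target);
        sc.setParameters("TargetID", targetId);
        remove(sc);                              // drop the current metadata rows
        if (entries != null) {
            for (S3MetaDataEntry entry : entries) {
                save(target, targetId, entry);   // persists a new SMetaVO
            }
        }
        txn.commit();
    } finally {
        txn.close();
    }
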
package com.cloud.bridge.persist.dao; -import java.util.ArrayList; import java.util.List; -import com.cloud.bridge.model.SBucket; -import com.cloud.bridge.model.SObject; -import com.cloud.bridge.persist.EntityDao; -import com.cloud.bridge.util.EntityParam; +import com.cloud.bridge.model.SBucketVO; +import com.cloud.bridge.model.SObjectVO; +import com.cloud.utils.db.GenericDao; -public class SObjectDao extends EntityDao { - public SObjectDao() { - super(SObject.class); - } +public interface SObjectDao extends GenericDao { - public SObject getByNameKey(SBucket bucket, String nameKey) { - return queryEntity("from SObject where bucket=? and nameKey=?", - new Object[] { new EntityParam(bucket), nameKey }); - } - - public List listBucketObjects(SBucket bucket, String prefix, String marker, int maxKeys) { - StringBuffer sb = new StringBuffer(); - List params = new ArrayList(); + List listBucketObjects(SBucketVO bucket, String prefix, + String marker, int maxKeys); - sb.append("from SObject o left join fetch o.items where deletionMark is null and o.bucket=?"); - params.add(new EntityParam(bucket)); - - if(prefix != null && !prefix.isEmpty()) { - sb.append(" and o.nameKey like ?"); - params.add(new String(prefix + "%")); - } - - if(marker != null && !marker.isEmpty()) { - sb.append(" and o.nameKey > ?"); - params.add(marker); - } - - return queryEntities(sb.toString(), 0, maxKeys, params.toArray()); - } - - public List listAllBucketObjects(SBucket bucket, String prefix, String marker, int maxKeys) { - StringBuffer sb = new StringBuffer(); - List params = new ArrayList(); + List listAllBucketObjects(SBucketVO bucket, String prefix, + String marker, int maxKeys); + + SObjectVO getByNameKey(SBucketVO bucket, String nameKey); - sb.append("from SObject o left join fetch o.items where o.bucket=?"); - params.add(new EntityParam(bucket)); - - if(prefix != null && !prefix.isEmpty()) { - sb.append(" and o.nameKey like ?"); - params.add(new String(prefix + "%")); - } - - if(marker != null && !marker.isEmpty()) { - sb.append(" and o.nameKey > ?"); - params.add(marker); - } - - return queryEntities(sb.toString(), 0, maxKeys, params.toArray()); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SObjectDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SObjectDaoImpl.java new file mode 100644 index 00000000000..b588877f06c --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SObjectDaoImpl.java @@ -0,0 +1,119 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
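The removed SObjectDao above relied on Hibernate's eager fetch ("left join fetch o.items"); in the implementation that follows, each SObjectVO's items are loaded explicitly through SObjectItemDao and attached by hand, and the prefix, marker and maxKeys arguments appear to be accepted but not yet applied to the new query. A sketch of that listing path with the generics restored (assumed):

    // Sketch of SObjectDaoImpl.listBucketObjects(...): filter by bucket id and
    // deletion mark, then attach the item rows fetched via the injected
    // SObjectItemDao.
    SearchBuilder<SObjectVO> searchByBucket = createSearchBuilder();
    searchByBucket.and("SBucketID", searchByBucket.entity().getBucketID(), SearchCriteria.Op.EQ);
    searchByBucket.and("DeletionMark", searchByBucket.entity().getDeletionMark(), SearchCriteria.Op.NULL);

    SearchCriteria<SObjectVO> sc = searchByBucket.create();
    sc.setParameters("SBucketID", bucket.getId());
    List<SObjectVO> objects = listBy(sc);
    for (SObjectVO object : objects) {
        Set<SObjectItemVO> items =
                new HashSet<SObjectItemVO>(itemDao.getItems(object.getId()));
        object.setItems(items);
    }
    return objects;
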
+package com.cloud.bridge.persist.dao; + +import java.util.ArrayList; +import java.util.HashSet; +import java.util.List; +import java.util.Set; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SBucket; +import com.cloud.bridge.model.SBucketVO; +import com.cloud.bridge.model.SObjectItemVO; +import com.cloud.bridge.model.SObjectVO; +import com.cloud.bridge.util.EntityParam; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.Filter; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SObjectDao.class}) +public class SObjectDaoImpl extends GenericDaoBase implements SObjectDao { + protected final SObjectItemDao itemDao = ComponentLocator.inject(SObjectItemDaoImpl.class); + + public SObjectDaoImpl() {} + + @Override + public SObjectVO getByNameKey(SBucketVO bucket, String nameKey) { + SObjectVO object = null; + SearchBuilder SearchByName = createSearchBuilder(); + SearchByName.and("SBucketID", SearchByName.entity().getBucketID() , SearchCriteria.Op.EQ); + SearchByName.and("NameKey", SearchByName.entity().getNameKey() , SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByName.create(); + sc.setParameters("SBucketID", bucket.getId()); + sc.setParameters("NameKey", nameKey); + object = findOneBy(sc); + if (null != object) { + Set items = new HashSet( + itemDao.getItems(object.getId())); + object.setItems(items); + } + return object; + + }finally { + txn.close(); + } + + } + + @Override + public List listBucketObjects(SBucketVO bucket, String prefix, String marker, int maxKeys) { + StringBuffer sb = new StringBuffer(); + List params = new ArrayList(); + SearchBuilder SearchByBucket = createSearchBuilder(); + List objects = new ArrayList(); + + SearchByBucket.and("SBucketID", SearchByBucket.entity().getBucketID(), SearchCriteria.Op.EQ); + SearchByBucket.and("DeletionMark", SearchByBucket.entity().getDeletionMark(), SearchCriteria.Op.NULL); + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = SearchByBucket.create(); + sc.setParameters("SBucketID", bucket.getId()); + objects = listBy(sc); + for (SObjectVO sObjectVO : objects) { + Set items = new HashSet(itemDao.getItems(sObjectVO.getId())); + sObjectVO.setItems(items); + } + return objects; + }finally { + txn.close(); + } + } + + @Override + public List listAllBucketObjects(SBucketVO bucket, String prefix, String marker, int maxKeys) { + StringBuffer sb = new StringBuffer(); + List params = new ArrayList(); + SearchBuilder getAllBuckets = createSearchBuilder(); + List objects = new ArrayList(); + getAllBuckets.and("SBucketID", getAllBuckets.entity().getBucketID(), SearchCriteria.Op.EQ); + + Transaction txn = Transaction.currentTxn(); // Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); + try { + txn.start(); + SearchCriteria sc = getAllBuckets.create(); + sc.setParameters("SBucketID", bucket.getId()); + objects = listBy(sc); + for (SObjectVO sObjectVO : objects) { + Set items = new HashSet(itemDao.getItems(sObjectVO.getId())); + sObjectVO.setItems(items); + } + return objects; + }finally { + txn.close(); + } + + } +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDao.java b/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDao.java index f91d180d0ac..2258309744d 100644 
--- a/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDao.java @@ -1,30 +1,14 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. package com.cloud.bridge.persist.dao; -import com.cloud.bridge.model.SObjectItem; -import com.cloud.bridge.persist.EntityDao; +import java.util.List; + +import com.cloud.bridge.model.SObjectItemVO; +import com.cloud.utils.db.GenericDao; + +public interface SObjectItemDao extends GenericDao { + + SObjectItemVO getByObjectIdNullVersion(long id); + + List getItems(long sobjectID); -public class SObjectItemDao extends EntityDao { - public SObjectItemDao() { - super(SObjectItem.class); - } - - public SObjectItem getByObjectIdNullVersion(long id) { - return queryEntity("from SObjectItem where theObject=? and version is null", new Object[] { id }); - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDaoImpl.java new file mode 100644 index 00000000000..ec632bbd244 --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/SObjectItemDaoImpl.java @@ -0,0 +1,71 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
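With the EntityDao constructors gone, callers in this patch no longer instantiate DAOs with new; they obtain them through ComponentLocator, and the @Local annotation on each Impl exposes the interface type. A minimal sketch of that wiring (the surrounding class is hypothetical, for illustration only):

    import com.cloud.bridge.persist.dao.SObjectItemDao;
    import com.cloud.bridge.persist.dao.SObjectItemDaoImpl;
    import com.cloud.utils.component.ComponentLocator;

    // Hypothetical consumer, shown only to illustrate the injection pattern
    // used by SObjectDaoImpl and the servlets later in this patch.
    public class ObjectItemConsumer {
        protected final SObjectItemDao itemDao =
                ComponentLocator.inject(SObjectItemDaoImpl.class);

        public int countItems(long sObjectId) {
            return itemDao.getItems(sObjectId).size();
        }
    }
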
+package com.cloud.bridge.persist.dao; + +import java.util.List; + +import javax.ejb.Local; + +import com.cloud.bridge.model.SObjectItemVO; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={SObjectItemDao.class}) +public class SObjectItemDaoImpl extends GenericDaoBase implements SObjectItemDao { + + + public SObjectItemDaoImpl() { + } + + @Override + public SObjectItemVO getByObjectIdNullVersion(long id) { + + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + SearchBuilder SearchByID = createSearchBuilder(); + SearchByID.and("ID", SearchByID.entity().getId(), SearchCriteria.Op.EQ); + + try { + txn.start(); + SearchCriteria sc = SearchByID.create(); + sc.setParameters("ID", id); + return findOneBy(sc); + }finally { + txn.close(); + } + } + + @Override + public List getItems(long sobjectID) { + + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + SearchBuilder SearchBySobjectID = createSearchBuilder(); + SearchBySobjectID.and("SObjectID", SearchBySobjectID.entity().getId(), SearchCriteria.Op.EQ); + + try { + txn.start(); + SearchCriteria sc = SearchBySobjectID.create(); + sc.setParameters("SObjectID", sobjectID); + return listBy(sc); + //findOneIncludingRemovedBy(sc); + } finally { + txn.close(); + } + } + +} diff --git a/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDao.java b/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDao.java index 4beb7a3604f..c178bf8ef45 100644 --- a/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDao.java +++ b/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDao.java @@ -1,169 +1,12 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
package com.cloud.bridge.persist.dao; -import java.sql.*; +import com.cloud.bridge.model.UserCredentialsVO; +import com.cloud.utils.db.GenericDao; -import org.apache.log4j.Logger; +public interface UserCredentialsDao extends GenericDao { -import com.cloud.bridge.model.UserCredentials; -import com.cloud.bridge.service.exception.NoSuchObjectException; + UserCredentialsVO getByAccessKey(String cloudAccessKey); + UserCredentialsVO getByCertUniqueId(String certId); -public class UserCredentialsDao extends BaseDao{ - public static final Logger logger = Logger.getLogger(UserCredentialsDao.class); - - private Connection conn = null; - - public UserCredentialsDao() { - } - - public void setUserKeys( String cloudAccessKey, String cloudSecretKey ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - UserCredentials user = getByAccessKey( cloudAccessKey ); - PreparedStatement statement = null; - - openConnection(); - try { - if ( null == user ) { - // -> do an insert since the user does not exist yet - statement = conn.prepareStatement ( "INSERT INTO usercredentials (AccessKey, SecretKey) VALUES(?,?)" ); - statement.setString( 1, cloudAccessKey ); - statement.setString( 2, cloudSecretKey ); - } - else { - // -> do an update since the user exists - statement = conn.prepareStatement ( "UPDATE usercredentials SET SecretKey=? WHERE AccessKey=?" ); - statement.setString( 1, cloudSecretKey ); - statement.setString( 2, cloudAccessKey ); - } - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - public void setCertificateId( String cloudAccessKey, String certId ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - UserCredentials user = getByAccessKey( cloudAccessKey ); - PreparedStatement statement = null; - - if (null == user) throw new NoSuchObjectException( "Cloud API Access Key [" + cloudAccessKey + "] is unknown" ); - - openConnection(); - try { - statement = conn.prepareStatement ( "UPDATE usercredentials SET CertUniqueId=? WHERE AccessKey=?" ); - statement.setString( 1, certId ); - statement.setString( 2, cloudAccessKey ); - int count = statement.executeUpdate(); - statement.close(); - - } finally { - closeConnection(); - } - } - - public UserCredentials getByAccessKey( String cloudAccessKey ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - openConnection(); - - UserCredentials user = null; - - try { - PreparedStatement statement = conn.prepareStatement ( "SELECT SecretKey, CertUniqueId FROM usercredentials WHERE AccessKey=?" ); - statement.setString( 1, cloudAccessKey ); - statement.executeQuery(); - ResultSet rs = statement.getResultSet (); - if (rs.next()) { - user = new UserCredentials(); - user.setAccessKey( cloudAccessKey ); - user.setSecretKey( rs.getString( "SecretKey" )); - user.setCertUniqueId( rs.getString( "CertUniqueId" )); - } - - } finally { - closeConnection(); - } - return user; - } - - public UserCredentials getByCertUniqueId( String certId ) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - openConnection(); - - UserCredentials user = null; - - try { - PreparedStatement statement = conn.prepareStatement ( "SELECT AccessKey, SecretKey FROM usercredentials WHERE CertUniqueId=?" 
); - statement.setString( 1, certId ); - statement.executeQuery(); - ResultSet rs = statement.getResultSet (); - if (rs.next()) { - user = new UserCredentials(); - user.setAccessKey( rs.getString( "AccessKey" )); - user.setSecretKey( rs.getString( "SecretKey" )); - user.setCertUniqueId( certId ); - } - - } finally { - closeConnection(); - } - return user; - } - - private void openConnection() - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - if (null == conn) { - Class.forName( "com.mysql.jdbc.Driver" ).newInstance(); - conn = DriverManager.getConnection( "jdbc:mysql://" + dbHost + "/" + awsapi_dbName, dbUser, dbPassword ); - } - } - - private void closeConnection() throws SQLException { - if (null != conn) conn.close(); - conn = null; - } - - public static void preCheckTableExistence() throws Exception{ - UserCredentialsDao dao = new UserCredentialsDao(); - dao.checkTableExistence(); - } - - private void checkTableExistence() throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { - openConnection(); - - try { - PreparedStatement statement = conn.prepareStatement ( "SELECT * FROM usercredentials " ); - statement.executeQuery(); - ResultSet rs = statement.getResultSet (); - if (rs.next()) { - return; - } - return; - - } catch(Exception e) { - Statement statement = conn.createStatement(); - statement.execute( "create table usercredentials(id integer auto_increment primary key, AccessKey varchar(1000), SecretKey varchar(1000), CertUniqueId varchar(1000))" ); - statement.close(); - } - finally{ - closeConnection(); - } - } } diff --git a/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDaoImpl.java b/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDaoImpl.java new file mode 100644 index 00000000000..c19c757355b --- /dev/null +++ b/awsapi/src/com/cloud/bridge/persist/dao/UserCredentialsDaoImpl.java @@ -0,0 +1,73 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
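The raw-JDBC UserCredentialsDao removed above managed its own connections and even created the usercredentials table on demand; its replacement below is a plain GenericDao, and the setUserKeys / setCertificateId logic moves to the callers (see the EC2RestServlet and S3RestServlet hunks later in this patch). A sketch of the new caller-side idiom, with assumed generics and the UserCredentialsVO(accessKey, secretKey) constructor that those servlets use:

    // Sketch: persisting and updating credentials through the new DAO,
    // mirroring the servlet code later in this patch (variable names assumed).
    Transaction txn = Transaction.open(Transaction.AWSAPI_DB);
    try {
        txn.start();
        UserCredentialsVO user = ucDao.getByAccessKey(cloudAccessKey);
        if (user == null) {
            // no record yet: insert a fresh access/secret key pair
            user = ucDao.persist(new UserCredentialsVO(cloudAccessKey, cloudSecretKey));
        } else {
            // existing record: update in place (here, the certificate id)
            user.setCertUniqueId(certId);
            ucDao.update(user.getId(), user);
        }
        txn.commit();
    } finally {
        txn.close();
    }
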
+package com.cloud.bridge.persist.dao; + +import java.sql.*; + +import javax.ejb.Local; + +import org.apache.log4j.Logger; + +import com.cloud.bridge.model.UserCredentialsVO; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.GenericDaoBase; +import com.cloud.utils.db.SearchBuilder; +import com.cloud.utils.db.SearchCriteria; +import com.cloud.utils.db.Transaction; + +@Local(value={UserCredentialsDao.class}) +public class UserCredentialsDaoImpl extends GenericDaoBase implements UserCredentialsDao { + public static final Logger logger = Logger.getLogger(UserCredentialsDaoImpl.class); + + public UserCredentialsDaoImpl() {} + + @DB + @Override + public UserCredentialsVO getByAccessKey( String cloudAccessKey ) { + SearchBuilder SearchByAccessKey = createSearchBuilder(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchByAccessKey.and("AccessKey", SearchByAccessKey.entity() + .getAccessKey(), SearchCriteria.Op.EQ); + SearchByAccessKey.done(); + SearchCriteria sc = SearchByAccessKey.create(); + sc.setParameters("AccessKey", cloudAccessKey); + return findOneBy(sc); + }finally { + txn.commit(); + txn.close(); + } + } + + @Override + public UserCredentialsVO getByCertUniqueId( String certId ) { + SearchBuilder SearchByCertID = createSearchBuilder(); + SearchByCertID.and("CertUniqueId", SearchByCertID.entity().getCertUniqueId(), SearchCriteria.Op.EQ); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + SearchCriteria sc = SearchByCertID.create(); + sc.setParameters("CertUniqueId", certId); + return findOneBy(sc); + }finally { + txn.close(); + } + + } + +} diff --git a/awsapi/src/com/cloud/bridge/service/EC2MainServlet.java b/awsapi/src/com/cloud/bridge/service/EC2MainServlet.java index 0c904a134ac..dceb665d1f2 100644 --- a/awsapi/src/com/cloud/bridge/service/EC2MainServlet.java +++ b/awsapi/src/com/cloud/bridge/service/EC2MainServlet.java @@ -27,12 +27,18 @@ import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; +import org.apache.log4j.Logger; -import com.cloud.bridge.persist.PersistContext; import com.cloud.bridge.persist.dao.CloudStackConfigurationDao; -import com.cloud.bridge.persist.dao.UserCredentialsDao; +import com.cloud.bridge.persist.dao.CloudStackConfigurationDaoImpl; import com.cloud.bridge.util.ConfigurationHelper; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.component.Inject; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.Transaction; +import net.sf.ehcache.Cache; +@DB public class EC2MainServlet extends HttpServlet{ private static final long serialVersionUID = 2201599478145974479L; @@ -41,23 +47,23 @@ public class EC2MainServlet extends HttpServlet{ public static final String EC2_SOAP_SERVLET_PATH="/services/AmazonEC2/"; public static final String ENABLE_EC2_API="enable.ec2.api"; private static boolean isEC2APIEnabled = false; + public static final Logger logger = Logger.getLogger(EC2MainServlet.class); + CloudStackConfigurationDao csDao = ComponentLocator.inject(CloudStackConfigurationDaoImpl.class); /** * We build the path to where the keystore holding the WS-Security X509 certificates * are stored. 
*/ + @DB public void init( ServletConfig config ) throws ServletException { try{ - ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext()); - UserCredentialsDao.preCheckTableExistence(); + ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext()); // check if API is enabled - CloudStackConfigurationDao csDao = new CloudStackConfigurationDao(); String value = csDao.getConfigValue(ENABLE_EC2_API); if(value != null){ isEC2APIEnabled = Boolean.valueOf(value); } - PersistContext.commitTransaction(true); - PersistContext.closeSession(true); + logger.info("Value of EC2 API Flag ::" + value); }catch(Exception e){ throw new ServletException("Error initializing awsapi: " + e.getMessage()); } diff --git a/awsapi/src/com/cloud/bridge/service/EC2RestServlet.java b/awsapi/src/com/cloud/bridge/service/EC2RestServlet.java index ceaf59d4abd..4f748731504 100644 --- a/awsapi/src/com/cloud/bridge/service/EC2RestServlet.java +++ b/awsapi/src/com/cloud/bridge/service/EC2RestServlet.java @@ -94,10 +94,9 @@ import com.amazon.ec2.RunInstancesResponse; import com.amazon.ec2.StartInstancesResponse; import com.amazon.ec2.StopInstancesResponse; import com.amazon.ec2.TerminateInstancesResponse; -import com.cloud.bridge.model.UserCredentials; -import com.cloud.bridge.persist.PersistContext; -import com.cloud.bridge.persist.dao.OfferingDao; -import com.cloud.bridge.persist.dao.UserCredentialsDao; +import com.cloud.bridge.model.UserCredentialsVO; +import com.cloud.bridge.persist.dao.OfferingDaoImpl; +import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl; import com.cloud.bridge.service.controller.s3.ServiceProvider; import com.cloud.bridge.service.core.ec2.EC2AssociateAddress; import com.cloud.bridge.service.core.ec2.EC2AuthorizeRevokeSecurityGroup; @@ -140,11 +139,15 @@ import com.cloud.bridge.util.AuthenticationUtils; import com.cloud.bridge.util.ConfigurationHelper; import com.cloud.bridge.util.EC2RestAuth; import com.cloud.stack.models.CloudStackAccount; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.Transaction; public class EC2RestServlet extends HttpServlet { private static final long serialVersionUID = -6168996266762804888L; + protected final UserCredentialsDaoImpl ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class); + protected final OfferingDaoImpl ofDao = ComponentLocator.inject(OfferingDaoImpl.class); public static final Logger logger = Logger.getLogger(EC2RestServlet.class); @@ -278,8 +281,6 @@ public class EC2RestServlet extends HttpServlet { logger.error("Unsupported action " + action); throw new EC2ServiceException(ClientError.Unsupported, "This operation is not available"); } - PersistContext.commitTransaction(); - PersistContext.commitTransaction(true); } catch( EC2ServiceException e ) { response.setStatus(e.getErrorCode()); @@ -306,8 +307,6 @@ public class EC2RestServlet extends HttpServlet { } catch (IOException e) { logger.error("Unexpected exception " + e.getMessage(), e); } - PersistContext.closeSession(); - PersistContext.closeSession(true); } } @@ -343,7 +342,7 @@ public class EC2RestServlet extends HttpServlet { private void setUserKeys( HttpServletRequest request, HttpServletResponse response ) { String[] accessKey = null; String[] secretKey = null; - + Transaction txn = null; try { // -> all these parameters are required accessKey = request.getParameterValues( "accesskey" ); @@ -369,15 +368,20 @@ public class EC2RestServlet extends HttpServlet { UserContext context = 
UserContext.current(); try { + txn = Transaction.open(Transaction.AWSAPI_DB); // -> use the keys to see if the account actually exists ServiceProvider.getInstance().getEC2Engine().validateAccount( accessKey[0], secretKey[0] ); - UserCredentialsDao credentialDao = new UserCredentialsDao(); - credentialDao.setUserKeys( accessKey[0], secretKey[0] ); - +/* UserCredentialsDao credentialDao = new UserCredentialsDao(); + credentialDao.setUserKeys( ); +*/ UserCredentialsVO user = new UserCredentialsVO(accessKey[0], secretKey[0]); + ucDao.persist(user); + txn.commit(); + } catch( Exception e ) { logger.error("SetUserKeys " + e.getMessage(), e); response.setStatus(401); endResponse(response, e.toString()); + txn.close(); return; } response.setStatus(200); @@ -402,6 +406,7 @@ public class EC2RestServlet extends HttpServlet { */ private void setCertificate( HttpServletRequest request, HttpServletResponse response ) throws Exception { + Transaction txn = null; try { // [A] Pull the cert and cloud AccessKey from the request String[] certificate = request.getParameterValues( "cert" ); @@ -437,10 +442,16 @@ public class EC2RestServlet extends HttpServlet { // [C] Associate the cert's uniqueId with the Cloud API keys String uniqueId = AuthenticationUtils.X509CertUniqueId( userCert ); logger.debug( "SetCertificate, uniqueId: " + uniqueId ); - UserCredentialsDao credentialDao = new UserCredentialsDao(); - credentialDao.setCertificateId( accessKey[0], uniqueId ); - response.setStatus(200); +/* UserCredentialsDao credentialDao = new UserCredentialsDao(); + credentialDao.setCertificateId( accessKey[0], uniqueId ); +*/ + txn = Transaction.open(Transaction.AWSAPI_DB); + UserCredentialsVO user = ucDao.getByAccessKey(accessKey[0]); + user.setCertUniqueId(uniqueId); + ucDao.update(user.getId(), user); + response.setStatus(200); endResponse(response, "User certificate set successfully"); + txn.commit(); } catch( NoSuchObjectException e ) { logger.error("SetCertificate exception " + e.getMessage(), e); @@ -449,7 +460,10 @@ public class EC2RestServlet extends HttpServlet { } catch( Exception e ) { logger.error("SetCertificate exception " + e.getMessage(), e); response.sendError(500, "SetCertificate exception " + e.getMessage()); + } finally { + txn.close(); } + } /** @@ -464,7 +478,8 @@ public class EC2RestServlet extends HttpServlet { * algorithm. 
*/ private void deleteCertificate( HttpServletRequest request, HttpServletResponse response ) - throws Exception { + throws Exception { + Transaction txn = null; try { String [] accessKey = request.getParameterValues( "AWSAccessKeyId" ); if ( null == accessKey || 0 == accessKey.length ) { @@ -483,10 +498,16 @@ public class EC2RestServlet extends HttpServlet { certStore.store( fsOut, keystorePassword.toCharArray()); // -> dis-associate the cert's uniqueId with the Cloud API keys - UserCredentialsDao credentialDao = new UserCredentialsDao(); - credentialDao.setCertificateId( accessKey[0], null ); +/* UserCredentialsDao credentialDao = new UserCredentialsDao(); + credentialDao.setCertificateId( accessKey[0], null ); + +*/ txn = Transaction.open(Transaction.AWSAPI_DB); + UserCredentialsVO user = ucDao.getByAccessKey(accessKey[0]); + user.setCertUniqueId(null); + ucDao.update(user.getId(), user); response.setStatus(200); - endResponse(response, "User certificate deleted successfully"); + endResponse(response, "User certificate deleted successfully"); + txn.commit(); } else response.setStatus(404); @@ -497,6 +518,8 @@ public class EC2RestServlet extends HttpServlet { } catch( Exception e ) { logger.error("DeleteCertificate exception " + e.getMessage(), e); response.sendError(500, "DeleteCertificate exception " + e.getMessage()); + } finally { + txn.close(); } } @@ -547,7 +570,7 @@ public class EC2RestServlet extends HttpServlet { } try { - OfferingDao ofDao = new OfferingDao(); + ofDao.setOfferMapping( amazonOffer, cloudOffer ); } catch( Exception e ) { @@ -596,9 +619,7 @@ public class EC2RestServlet extends HttpServlet { } try { - OfferingDao ofDao = new OfferingDao(); ofDao.deleteOfferMapping( amazonOffer ); - } catch( Exception e ) { logger.error("DeleteOfferMapping " + e.getMessage(), e); response.setStatus(401); @@ -1695,8 +1716,8 @@ public class EC2RestServlet extends HttpServlet { } // [B] Use the cloudAccessKey to get the users secret key in the db - UserCredentialsDao credentialDao = new UserCredentialsDao(); - UserCredentials cloudKeys = credentialDao.getByAccessKey( cloudAccessKey ); + UserCredentialsVO cloudKeys = ucDao.getByAccessKey( cloudAccessKey ); + if ( null == cloudKeys ) { logger.debug( cloudAccessKey + " is not defined in the EC2 service - call SetUserKeys" ); diff --git a/awsapi/src/com/cloud/bridge/service/S3RestServlet.java b/awsapi/src/com/cloud/bridge/service/S3RestServlet.java index 282385b063e..c1458a7b4af 100644 --- a/awsapi/src/com/cloud/bridge/service/S3RestServlet.java +++ b/awsapi/src/com/cloud/bridge/service/S3RestServlet.java @@ -43,9 +43,12 @@ import org.w3c.dom.NodeList; import com.cloud.bridge.io.MultiPartDimeInputStream; import com.cloud.bridge.model.SAcl; -import com.cloud.bridge.persist.PersistContext; +import com.cloud.bridge.model.UserCredentialsVO; import com.cloud.bridge.persist.dao.CloudStackConfigurationDao; +import com.cloud.bridge.persist.dao.CloudStackConfigurationDaoImpl; import com.cloud.bridge.persist.dao.UserCredentialsDao; + +import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl; import com.cloud.bridge.service.controller.s3.S3BucketAction; import com.cloud.bridge.service.controller.s3.S3ObjectAction; import com.cloud.bridge.service.controller.s3.ServiceProvider; @@ -57,26 +60,29 @@ import com.cloud.bridge.service.core.s3.S3Grant; import com.cloud.bridge.service.core.s3.S3MetaDataEntry; import com.cloud.bridge.service.core.s3.S3PutObjectRequest; import com.cloud.bridge.service.core.s3.S3PutObjectResponse; -import 
com.cloud.bridge.service.exception.InternalErrorException; import com.cloud.bridge.service.exception.InvalidBucketName; -import com.cloud.bridge.service.exception.NoSuchObjectException; import com.cloud.bridge.service.exception.PermissionDeniedException; -import com.cloud.bridge.util.AuthenticationUtils; import com.cloud.bridge.util.ConfigurationHelper; import com.cloud.bridge.util.HeaderParam; import com.cloud.bridge.util.RestAuth; import com.cloud.bridge.util.S3SoapAuth; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.Transaction; +import net.sf.ehcache.Cache; public class S3RestServlet extends HttpServlet { private static final long serialVersionUID = -6168996266762804877L; public static final String ENABLE_S3_API="enable.s3.api"; private static boolean isS3APIEnabled = false; public static final Logger logger = Logger.getLogger(S3RestServlet.class); + protected final CloudStackConfigurationDao csDao = ComponentLocator.inject(CloudStackConfigurationDaoImpl.class); + protected final UserCredentialsDao ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class); protected void doGet(HttpServletRequest req, HttpServletResponse resp) { processRequest( req, resp, "GET" ); - } + } protected void doPost(HttpServletRequest req, HttpServletResponse resp) { @@ -106,15 +112,13 @@ public class S3RestServlet extends HttpServlet { public void init( ServletConfig config ) throws ServletException { try{ ConfigurationHelper.preConfigureConfigPathFromServletContext(config.getServletContext()); - UserCredentialsDao.preCheckTableExistence(); // check if API is enabled - CloudStackConfigurationDao csDao = new CloudStackConfigurationDao(); String value = csDao.getConfigValue(ENABLE_S3_API); if(value != null) { isS3APIEnabled = Boolean.valueOf(value); } - PersistContext.commitTransaction(true); - PersistContext.closeSession(true); + logger.info("S3Engine :: Configuration value is : " + value); + }catch(Exception e){ throw new ServletException("Error initializing awsapi: " + e.getMessage()); } @@ -130,6 +134,7 @@ public class S3RestServlet extends HttpServlet { */ private void processRequest( HttpServletRequest request, HttpServletResponse response, String method ) { + Transaction txn = Transaction.open("cloudbridge", Transaction.AWSAPI_DB, true); try { logRequest(request); @@ -164,12 +169,13 @@ public class S3RestServlet extends HttpServlet { } + txn.start(); // -> authenticated calls if ( !((method.equalsIgnoreCase( "POST" ) && !(request.getQueryString().equalsIgnoreCase("delete"))) ) ){ S3AuthParams params = extractRequestHeaders( request ); authenticateRequest( request, params ); } - + ServletAction action = routeRequest(request); if ( action != null ) { action.execute(request, response); @@ -178,35 +184,30 @@ public class S3RestServlet extends HttpServlet { response.setStatus(404); endResponse(response, "File not found"); } - - PersistContext.commitTransaction(); - + txn.close(); } catch( InvalidBucketName e) { - PersistContext.rollbackTransaction(); logger.error("Unexpected exception " + e.getMessage(), e); response.setStatus(400); endResponse(response, "Invalid Bucket Name - " + e.toString()); } catch(PermissionDeniedException e) { - PersistContext.rollbackTransaction(); logger.error("Unexpected exception " + e.getMessage(), e); response.setStatus(403); endResponse(response, "Access denied - " + e.toString()); } catch(Throwable e) { - PersistContext.rollbackTransaction(); logger.error("Unexpected exception " + e.getMessage(), e); 
response.setStatus(404); endResponse(response, "Bad request"); } finally { + try { response.flushBuffer(); } catch (IOException e) { logger.error("Unexpected exception " + e.getMessage(), e); } - PersistContext.closeSession(); } } @@ -239,6 +240,7 @@ public class S3RestServlet extends HttpServlet { * * As with all REST calls HTTPS should be used to ensure their security. */ + @DB private void setUserKeys( HttpServletRequest request, HttpServletResponse response ) { String[] accessKey = null; String[] secretKey = null; @@ -266,8 +268,14 @@ public class S3RestServlet extends HttpServlet { try { // -> use the keys to see if the account actually exists //ServiceProvider.getInstance().getEC2Engine().validateAccount( accessKey[0], secretKey[0] ); - UserCredentialsDao credentialDao = new UserCredentialsDao(); - credentialDao.setUserKeys( accessKey[0], secretKey[0] ); + //UserCredentialsDaoImpl credentialDao = new UserCredentialsDao(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + txn.start(); + UserCredentialsVO user = new UserCredentialsVO(accessKey[0], secretKey[0]); + user = ucDao.persist(user); + txn.commit(); + txn.close(); + //credentialDao.setUserKeys( accessKey[0], secretKey[0] ); } catch( Exception e ) { logger.error("SetUserKeys " + e.getMessage(), e); @@ -586,7 +594,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request) xml.append( "" ); endResponse(response, xml.toString()); - PersistContext.commitTransaction(); return; } @@ -605,7 +612,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request) xml.append( "" ); endResponse(response, xml.toString()); - PersistContext.commitTransaction(); } catch(PermissionDeniedException e) { logger.error("Unexpected exception " + e.getMessage(), e); @@ -618,7 +624,6 @@ private S3ObjectAction routePlainPostRequest (HttpServletRequest request) } finally { - PersistContext.closeSession(); } } diff --git a/awsapi/src/com/cloud/bridge/service/controller/s3/S3BucketAction.java b/awsapi/src/com/cloud/bridge/service/controller/s3/S3BucketAction.java index 0ea862d10b2..8f77916f750 100644 --- a/awsapi/src/com/cloud/bridge/service/controller/s3/S3BucketAction.java +++ b/awsapi/src/com/cloud/bridge/service/controller/s3/S3BucketAction.java @@ -49,14 +49,18 @@ import com.amazon.s3.GetBucketAccessControlPolicyResponse; import com.amazon.s3.ListAllMyBucketsResponse; import com.amazon.s3.ListBucketResponse; import com.cloud.bridge.io.MTOMAwareResultStreamWriter; +import com.cloud.bridge.model.BucketPolicyVO; import com.cloud.bridge.model.SAcl; +import com.cloud.bridge.model.SAclVO; import com.cloud.bridge.model.SBucket; +import com.cloud.bridge.model.SBucketVO; import com.cloud.bridge.model.SHost; -import com.cloud.bridge.persist.PersistContext; import com.cloud.bridge.persist.dao.BucketPolicyDao; +import com.cloud.bridge.persist.dao.BucketPolicyDaoImpl; import com.cloud.bridge.persist.dao.MultipartLoadDao; -import com.cloud.bridge.persist.dao.SAclDao; +import com.cloud.bridge.persist.dao.SAclDaoImpl; import com.cloud.bridge.persist.dao.SBucketDao; +import com.cloud.bridge.persist.dao.SBucketDaoImpl; import com.cloud.bridge.service.S3Constants; import com.cloud.bridge.service.S3RestServlet; import com.cloud.bridge.service.controller.s3.ServiceProvider; @@ -81,16 +85,13 @@ import com.cloud.bridge.service.core.s3.S3ListAllMyBucketsResponse; import com.cloud.bridge.service.core.s3.S3ListBucketObjectEntry; import com.cloud.bridge.service.core.s3.S3ListBucketRequest; import 
com.cloud.bridge.service.core.s3.S3ListBucketResponse; -import com.cloud.bridge.service.core.s3.S3MetaDataEntry; import com.cloud.bridge.service.core.s3.S3MultipartUpload; import com.cloud.bridge.service.core.s3.S3PolicyContext; -import com.cloud.bridge.service.core.s3.S3PutObjectRequest; import com.cloud.bridge.service.core.s3.S3Response; import com.cloud.bridge.service.core.s3.S3SetBucketAccessControlPolicyRequest; import com.cloud.bridge.service.core.s3.S3BucketPolicy.PolicyAccess; import com.cloud.bridge.service.core.s3.S3PolicyAction.PolicyActions; import com.cloud.bridge.service.core.s3.S3PolicyCondition.ConditionKeys; -import com.cloud.bridge.service.exception.InternalErrorException; import com.cloud.bridge.service.exception.InvalidBucketName; import com.cloud.bridge.service.exception.InvalidRequestContentException; import com.cloud.bridge.service.exception.NetworkIOException; @@ -108,10 +109,14 @@ import com.cloud.bridge.util.Triple; import com.cloud.bridge.util.XSerializer; import com.cloud.bridge.util.XSerializerXmlAdapter; import com.cloud.bridge.util.XmlHelper; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.Transaction; public class S3BucketAction implements ServletAction { protected final static Logger logger = Logger.getLogger(S3BucketAction.class); + protected final BucketPolicyDao bPolicyDao = ComponentLocator.inject(BucketPolicyDaoImpl.class); + protected final SBucketDao bucketDao = ComponentLocator.inject(SBucketDaoImpl.class); private DocumentBuilderFactory dbf = null; public S3BucketAction() { @@ -347,18 +352,16 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes String policy = streamToString( request.getInputStream()); // [A] Is there an owner of an existing policy or bucket? - BucketPolicyDao policyDao = new BucketPolicyDao(); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName( bucketName ); + SBucketVO bucket = bucketDao.getByName( bucketName ); String owner = null; if ( null != bucket ) { - owner = bucket.getOwnerCanonicalId(); + owner = bucket.getOwnerCanonicalId(); } else { try { - owner = policyDao.getPolicyOwner( bucketName ); + owner = bPolicyDao.getByName(bucketName).getOwnerCanonicalID(); } catch( Exception e ) {} } @@ -366,36 +369,42 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes // [B] "The bucket owner by default has permissions to attach bucket policies to their buckets using PUT Bucket policy." 
// -> the bucket owner may want to restrict the IP address from where this can be executed - String client = UserContext.current().getCanonicalUserId(); - S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketPolicy, bucketName ); - switch( S3Engine.verifyPolicy( context )) { - case ALLOW: - break; - - case DEFAULT_DENY: - if (null != owner && !client.equals( owner )) { - response.setStatus(405); - return; - } - break; - - case DENY: - response.setStatus(403); - return; - } - - + String client = UserContext.current().getCanonicalUserId(); + S3PolicyContext context = new S3PolicyContext( + PolicyActions.PutBucketPolicy, bucketName); + + switch (S3Engine.verifyPolicy(context)) { + case ALLOW: + break; + + case DEFAULT_DENY: + if (null != owner && !client.equals(owner)) { + response.setStatus(405); + return; + } + break; + case DENY: + response.setStatus(403); + return; + } + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); // [B] Place the policy into the database over writting an existing policy try { // -> first make sure that the policy is valid by parsing it PolicyParser parser = new PolicyParser(); S3BucketPolicy sbp = parser.parse( policy, bucketName ); - - policyDao.deletePolicy( bucketName ); - if (null != policy && !policy.isEmpty()) policyDao.addPolicy( bucketName, client, policy ); + bPolicyDao.deletePolicy(bucketName); + + if (null != policy && !policy.isEmpty()) { + BucketPolicyVO bpolicy = new BucketPolicyVO(bucketName, client, policy); + bpolicy = bPolicyDao.persist(bpolicy); + //policyDao.addPolicy( bucketName, client, policy ); + } if (null != sbp) ServiceProvider.getInstance().setBucketPolicy( bucketName, sbp ); - response.setStatus(200); + response.setStatus(200); + txn.commit(); + txn.close(); } catch( PermissionDeniedException e ) { logger.error("Put Bucket Policy failed due to " + e.getMessage(), e); @@ -416,185 +425,193 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); // [A] Is there an owner of an existing policy or bucket? - BucketPolicyDao policyDao = new BucketPolicyDao(); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName( bucketName ); - String owner = null; - - if ( null != bucket ) - { - owner = bucket.getOwnerCanonicalId(); - } - else - { try { - owner = policyDao.getPolicyOwner( bucketName ); - } - catch( Exception e ) {} - } + SBucketVO bucket = bucketDao.getByName(bucketName); + String owner = null; - - // [B] "The bucket owner by default has permissions to retrieve bucket policies using GET Bucket policy." 
- // -> the bucket owner may want to restrict the IP address from where this can be executed - String client = UserContext.current().getCanonicalUserId(); - S3PolicyContext context = new S3PolicyContext( PolicyActions.GetBucketPolicy, bucketName ); - switch( S3Engine.verifyPolicy( context )) { - case ALLOW: - break; - - case DEFAULT_DENY: - if (null != owner && !client.equals( owner )) { - response.setStatus(405); - return; - } - break; - - case DENY: - response.setStatus(403); - return; - } - - - // [B] Pull the policy from the database if one exists - try { - String policy = policyDao.getPolicy( bucketName ); - if ( null == policy ) { - response.setStatus(404); - } - else { - response.setStatus(200); - response.setContentType("application/json"); - S3RestServlet.endResponse(response, policy); - } - } - catch( Exception e ) { - logger.error("Get Bucket Policy failed due to " + e.getMessage(), e); - response.setStatus(500); - } + if (null != bucket) { + owner = bucket.getOwnerCanonicalId(); + } else { + try { + owner = bPolicyDao.getByName(bucketName).getOwnerCanonicalID(); + } catch (Exception e) { + } } - private void executeDeleteBucketPolicy(HttpServletRequest request, HttpServletResponse response) - { - String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); - - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName( bucketName ); - if (bucket != null) - { - String client = UserContext.current().getCanonicalUserId(); - if (!client.equals( bucket.getOwnerCanonicalId())) { - response.setStatus(405); - return; - } - } + // [B] + // "The bucket owner by default has permissions to retrieve bucket policies using GET Bucket policy." + // -> the bucket owner may want to restrict the IP address from where + // this can be executed + String client = UserContext.current().getCanonicalUserId(); + S3PolicyContext context = new S3PolicyContext( + PolicyActions.GetBucketPolicy, bucketName); + switch (S3Engine.verifyPolicy(context)) { + case ALLOW: + break; - try { - BucketPolicyDao policyDao = new BucketPolicyDao(); - String policy = policyDao.getPolicy( bucketName ); - if ( null == policy ) { - response.setStatus(204); - } - else { - ServiceProvider.getInstance().deleteBucketPolicy( bucketName ); - policyDao.deletePolicy( bucketName ); - response.setStatus(200); - } - } - catch( Exception e ) { - logger.error("Delete Bucket Policy failed due to " + e.getMessage(), e); - response.setStatus(500); - } + case DEFAULT_DENY: + if (null != owner && !client.equals(owner)) { + response.setStatus(405); + return; + } + break; + + case DENY: + response.setStatus(403); + return; } - public void executeGetAllBuckets(HttpServletRequest request, HttpServletResponse response) - throws IOException, XMLStreamException - { - Calendar cal = Calendar.getInstance(); - cal.set( 1970, 1, 1 ); - S3ListAllMyBucketsRequest engineRequest = new S3ListAllMyBucketsRequest(); - engineRequest.setAccessKey(UserContext.current().getAccessKey()); - engineRequest.setRequestTimestamp( cal ); - engineRequest.setSignature( "" ); - - + // [B] Pull the policy from the database if one exists + try { + String policy = bPolicyDao.getByName(bucketName).getPolicy(); + if (null == policy) { + response.setStatus(404); + } else { + response.setStatus(200); + response.setContentType("application/json"); + S3RestServlet.endResponse(response, policy); + } + } catch (Exception e) { + logger.error("Get Bucket Policy failed due to " + e.getMessage(), e); + response.setStatus(500); + } + } + private void 
executeDeleteBucketPolicy(HttpServletRequest request, + HttpServletResponse response) { + String bucketName = (String) request + .getAttribute(S3Constants.BUCKET_ATTR_KEY); - S3ListAllMyBucketsResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); - - // To allow the all buckets list to be serialized via Axiom classes - ListAllMyBucketsResponse allBuckets = S3SerializableServiceImplementation.toListAllMyBucketsResponse( engineResponse ); - - OutputStream outputStream = response.getOutputStream(); - response.setStatus(200); - response.setContentType("application/xml"); - // The content-type literally should be "application/xml; charset=UTF-8" - // but any compliant JVM supplies utf-8 by default - -// MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListAllMyBucketsResult", outputStream ); -// resultWriter.startWrite(); -// resultWriter.writeout(allBuckets); -// resultWriter.stopWrite(); - StringBuffer xml = new StringBuffer(); - xml.append( "" ); - xml.append(""); - xml.append(""); - xml.append(engineResponse.getOwner().getID()).append(""); - xml.append("").append(engineResponse.getOwner().getDisplayName()).append(""); - xml.append("").append(""); - SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); - for (S3ListAllMyBucketsEntry entry :engineResponse.getBuckets()) { - xml.append("").append("").append(entry.getName()).append(""); - xml.append("").append(sdf.format(entry.getCreationDate().getTime())).append(""); - xml.append(""); - } - xml.append("").append(""); - response.setStatus(200); - response.setContentType("text/xml; charset=UTF-8"); - S3RestServlet.endResponse(response, xml.toString()); + SBucketVO bucket = bucketDao.getByName(bucketName); + if (bucket != null) { + String client = UserContext.current().getCanonicalUserId(); + if (!client.equals(bucket.getOwnerCanonicalId())) { + response.setStatus(405); + return; + } + } + + try { + + String policy = bPolicyDao.getByName(bucketName).getPolicy(); + if (null == policy) { + response.setStatus(204); + } else { + ServiceProvider.getInstance().deleteBucketPolicy(bucketName); + bPolicyDao.deletePolicy(bucketName); + response.setStatus(200); + } + } catch (Exception e) { + logger.error( + "Delete Bucket Policy failed due to " + e.getMessage(), e); + response.setStatus(500); + } + } + + public void executeGetAllBuckets(HttpServletRequest request, + HttpServletResponse response) throws IOException, + XMLStreamException { + Calendar cal = Calendar.getInstance(); + cal.set(1970, 1, 1); + S3ListAllMyBucketsRequest engineRequest = new S3ListAllMyBucketsRequest(); + engineRequest.setAccessKey(UserContext.current().getAccessKey()); + engineRequest.setRequestTimestamp(cal); + engineRequest.setSignature(""); + + S3ListAllMyBucketsResponse engineResponse = ServiceProvider + .getInstance().getS3Engine().handleRequest(engineRequest); + + // To allow the all buckets list to be serialized via Axiom classes + ListAllMyBucketsResponse allBuckets = S3SerializableServiceImplementation + .toListAllMyBucketsResponse(engineResponse); + + OutputStream outputStream = response.getOutputStream(); + response.setStatus(200); + response.setContentType("application/xml"); + // The content-type literally should be "application/xml; charset=UTF-8" + // but any compliant JVM supplies utf-8 by default + + // MTOMAwareResultStreamWriter resultWriter = new + // MTOMAwareResultStreamWriter ("ListAllMyBucketsResult", outputStream + // ); + // resultWriter.startWrite(); + // 
resultWriter.writeout(allBuckets); + // resultWriter.stopWrite(); + StringBuffer xml = new StringBuffer(); + xml.append(""); + xml.append(""); + xml.append(""); + xml.append(engineResponse.getOwner().getID()).append(""); + xml.append("") + .append(engineResponse.getOwner().getDisplayName()) + .append(""); + xml.append("").append(""); + SimpleDateFormat sdf = new SimpleDateFormat( + "yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + for (S3ListAllMyBucketsEntry entry : engineResponse.getBuckets()) { + xml.append("").append("").append(entry.getName()) + .append(""); + xml.append("") + .append(sdf.format(entry.getCreationDate().getTime())) + .append(""); + xml.append(""); + } + xml.append("").append(""); + response.setStatus(200); + response.setContentType("text/xml; charset=UTF-8"); + S3RestServlet.endResponse(response, xml.toString()); } public void executeGetBucket(HttpServletRequest request, HttpServletResponse response) throws IOException, XMLStreamException { - S3ListBucketRequest engineRequest = new S3ListBucketRequest(); - engineRequest.setBucketName((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)); - engineRequest.setDelimiter(request.getParameter("delimiter")); - engineRequest.setMarker(request.getParameter("marker")); - engineRequest.setPrefix(request.getParameter("prefix")); - - int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000); - engineRequest.setMaxKeys(maxKeys); - try { - S3ListBucketResponse engineResponse = ServiceProvider.getInstance().getS3Engine().listBucketContents( engineRequest, false ); - - // To allow the all list buckets result to be serialized via Axiom classes - ListBucketResponse oneBucket = S3SerializableServiceImplementation.toListBucketResponse( engineResponse ); - - OutputStream outputStream = response.getOutputStream(); - response.setStatus(200); - response.setContentType("application/xml"); - // The content-type literally should be "application/xml; charset=UTF-8" - // but any compliant JVM supplies utf-8 by default; - - MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter ("ListBucketResult", outputStream ); - resultWriter.startWrite(); - resultWriter.writeout(oneBucket); - resultWriter.stopWrite(); - } catch (NoSuchObjectException nsoe) { - response.setStatus(404); - response.setContentType("application/xml"); + S3ListBucketRequest engineRequest = new S3ListBucketRequest(); + engineRequest.setBucketName((String) request + .getAttribute(S3Constants.BUCKET_ATTR_KEY)); + engineRequest.setDelimiter(request.getParameter("delimiter")); + engineRequest.setMarker(request.getParameter("marker")); + engineRequest.setPrefix(request.getParameter("prefix")); - StringBuffer xmlError = new StringBuffer(); - xmlError.append("") + int maxKeys = Converter.toInt(request.getParameter("max-keys"), 1000); + engineRequest.setMaxKeys(maxKeys); + try { + S3ListBucketResponse engineResponse = ServiceProvider.getInstance() + .getS3Engine().listBucketContents(engineRequest, false); + + // To allow the all list buckets result to be serialized via Axiom + // classes + ListBucketResponse oneBucket = S3SerializableServiceImplementation + .toListBucketResponse(engineResponse); + + OutputStream outputStream = response.getOutputStream(); + response.setStatus(200); + response.setContentType("application/xml"); + // The content-type literally should be + // "application/xml; charset=UTF-8" + // but any compliant JVM supplies utf-8 by default; + + MTOMAwareResultStreamWriter resultWriter = new MTOMAwareResultStreamWriter( + "ListBucketResult", outputStream); + 
resultWriter.startWrite(); + resultWriter.writeout(oneBucket); + resultWriter.stopWrite(); + } catch (NoSuchObjectException nsoe) { + response.setStatus(404); + response.setContentType("application/xml"); + + StringBuffer xmlError = new StringBuffer(); + xmlError.append("") .append("NoSuchBucketThe specified bucket does not exist") - .append("").append((String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY)) + .append("") + .append((String) request + .getAttribute(S3Constants.BUCKET_ATTR_KEY)) .append("") - .append("1DEADBEEF9") //TODO - .append("abCdeFgHiJ1k2LmN3op4q56r7st89") //TODO + .append("1DEADBEEF9") // TODO + .append("abCdeFgHiJ1k2LmN3op4q56r7st89") // TODO .append(""); - S3RestServlet.endResponse(response, xmlError.toString()); + S3RestServlet.endResponse(response, xmlError.toString()); - } + } } @@ -640,8 +657,7 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes return; } - SBucketDao bucketDao = new SBucketDao(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName( bucketName ); if (sbucket == null) { response.setStatus( 404 ); return; @@ -834,111 +850,124 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes public void executePutBucketAcl(HttpServletRequest request, HttpServletResponse response) throws IOException { // [A] Determine that there is an applicable bucket which might have an ACL set - - String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName( bucketName ); - String owner = null; - if ( null != bucket ) - owner = bucket.getOwnerCanonicalId(); - if (null == owner) - { - logger.error( "ACL update failed since " + bucketName + " does not exist" ); - throw new IOException("ACL update failed"); - } - - // [B] Obtain the grant request which applies to the acl request string. This latter is supplied as the value of the x-amz-acl header. - - S3SetBucketAccessControlPolicyRequest engineRequest = new S3SetBucketAccessControlPolicyRequest(); - S3Grant grantRequest = new S3Grant(); - S3AccessControlList aclRequest = new S3AccessControlList(); - - String aclRequestString = request.getHeader("x-amz-acl"); - OrderedPair accessControlsForBucketOwner = SAcl.getCannedAccessControls(aclRequestString,"SBucket"); - grantRequest.setPermission(accessControlsForBucketOwner.getFirst()); - grantRequest.setGrantee(accessControlsForBucketOwner.getSecond()); - grantRequest.setCanonicalUserID(owner); - aclRequest.addGrant(grantRequest); - engineRequest.setAcl(aclRequest); - engineRequest.setBucketName(bucketName); - - - // [C] Allow an S3Engine to handle the S3SetBucketAccessControlPolicyRequest - S3Response engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest(engineRequest); - response.setStatus( engineResponse.getResultCode()); + + String bucketName = (String) request + .getAttribute(S3Constants.BUCKET_ATTR_KEY); + SBucketVO bucket = bucketDao.getByName(bucketName); + String owner = null; + if (null != bucket) + owner = bucket.getOwnerCanonicalId(); + if (null == owner) { + logger.error("ACL update failed since " + bucketName + + " does not exist"); + throw new IOException("ACL update failed"); + } + + // [B] Obtain the grant request which applies to the acl request string. + // This latter is supplied as the value of the x-amz-acl header. 
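A note on the x-amz-acl translation performed in this handler: SAclVO.getCannedAccessControls() turns the canned-ACL header value into a (permission, grantee) pair that is then copied onto the S3Grant. The helper's body is not part of this patch, so the sketch below only illustrates the kind of mapping involved; the constant values and the supported header names are assumptions, not the real SAclVO code.

    // Illustrative only -- a hypothetical canned-ACL lookup, not SAclVO's implementation.
    public class CannedAclSketch {
        // Assumed permission bits, in the spirit of the SAcl.PERMISSION_* constants used elsewhere.
        static final int PERMISSION_READ = 1;
        static final int PERMISSION_FULL = 7;
        // Assumed grantee markers: the bucket owner vs. the anonymous "all users" group.
        static final int GRANTEE_OWNER = 0;
        static final int GRANTEE_ALLUSERS = 1;

        /** Returns {permission, grantee} for a canned ACL header value. */
        static int[] cannedAccessControls(String aclHeader) {
            if (aclHeader == null || "private".equalsIgnoreCase(aclHeader))
                return new int[] { PERMISSION_FULL, GRANTEE_OWNER };
            if ("public-read".equalsIgnoreCase(aclHeader))
                return new int[] { PERMISSION_READ, GRANTEE_ALLUSERS };
            throw new IllegalArgumentException("unsupported canned ACL: " + aclHeader);
        }

        public static void main(String[] args) {
            int[] pair = cannedAccessControls("public-read");
            System.out.println("permission=" + pair[0] + ", grantee=" + pair[1]);
        }
    }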
+ + S3SetBucketAccessControlPolicyRequest engineRequest = new S3SetBucketAccessControlPolicyRequest(); + S3Grant grantRequest = new S3Grant(); + S3AccessControlList aclRequest = new S3AccessControlList(); + + String aclRequestString = request.getHeader("x-amz-acl"); + OrderedPair accessControlsForBucketOwner = SAclVO.getCannedAccessControls(aclRequestString, "SBucket"); + grantRequest.setPermission(accessControlsForBucketOwner.getFirst()); + grantRequest.setGrantee(accessControlsForBucketOwner.getSecond()); + grantRequest.setCanonicalUserID(owner); + aclRequest.addGrant(grantRequest); + engineRequest.setAcl(aclRequest); + engineRequest.setBucketName(bucketName); + + // [C] Allow an S3Engine to handle the + // S3SetBucketAccessControlPolicyRequest + S3Response engineResponse = ServiceProvider.getInstance().getS3Engine() + .handleRequest(engineRequest); + response.setStatus(engineResponse.getResultCode()); } public void executePutBucketVersioning(HttpServletRequest request, HttpServletResponse response) throws IOException { - String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); - String versioningStatus = null; - Node item = null; + String bucketName = (String) request.getAttribute(S3Constants.BUCKET_ATTR_KEY); + String versioningStatus = null; + Node item = null; - if (null == bucketName) { - logger.error( "executePutBucketVersioning - no bucket name given" ); - response.setStatus( 400 ); - return; - } - - // -> is the XML as defined? - try { - DocumentBuilder db = dbf.newDocumentBuilder(); - Document restXML = db.parse( request.getInputStream()); - NodeList match = S3RestServlet.getElement( restXML, "http://s3.amazonaws.com/doc/2006-03-01/", "Status" ); - if ( 0 < match.getLength()) - { - item = match.item(0); - versioningStatus = new String( item.getFirstChild().getNodeValue()); - } - else - { logger.error( "executePutBucketVersioning - cannot find Status tag in XML body" ); - response.setStatus( 400 ); - return; - } - } - catch( Exception e ) { - logger.error( "executePutBucketVersioning - failed to parse XML due to " + e.getMessage(), e); - response.setStatus(400); - return; - } - - try { - // Irrespective of what the ACLs say only the owner can turn on versioning on a bucket. - // The bucket owner may want to restrict the IP address from which this can occur. 
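The Status lookup just above goes through S3RestServlet.getElement() against the 2006-03-01 S3 namespace, and the handler then stores 1 (Enabled) or 2 (Suspended) on the bucket row. The standalone sketch below reproduces that flow with plain JAXP, so treat it as an approximation for readers rather than the servlet code itself.

    import java.io.ByteArrayInputStream;
    import javax.xml.parsers.DocumentBuilderFactory;
    import org.w3c.dom.Document;
    import org.w3c.dom.NodeList;

    // Standalone approximation of the Status handling in executePutBucketVersioning.
    public class VersioningStatusSketch {
        public static void main(String[] args) throws Exception {
            String body = "<VersioningConfiguration xmlns=\"http://s3.amazonaws.com/doc/2006-03-01/\">"
                        + "<Status>Enabled</Status></VersioningConfiguration>";
            DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
            dbf.setNamespaceAware(true);
            Document doc = dbf.newDocumentBuilder()
                    .parse(new ByteArrayInputStream(body.getBytes("UTF-8")));
            NodeList match = doc.getElementsByTagNameNS(
                    "http://s3.amazonaws.com/doc/2006-03-01/", "Status");
            if (match.getLength() == 0) {
                System.out.println("400 - no Status element");  // the handler answers 400 here
                return;
            }
            String status = match.item(0).getFirstChild().getNodeValue();
            // The handler stores 1 for Enabled and 2 for Suspended on the SBucketVO.
            int versioningStatus = "Enabled".equalsIgnoreCase(status) ? 1
                                 : "Suspended".equalsIgnoreCase(status) ? 2 : -1;
            System.out.println(status + " -> versioningStatus=" + versioningStatus);
        }
    }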
- SBucketDao bucketDao = new SBucketDao(); - SBucket sbucket = bucketDao.getByName( bucketName ); - - String client = UserContext.current().getCanonicalUserId(); - if (!client.equals( sbucket.getOwnerCanonicalId())) - throw new PermissionDeniedException( "Access Denied - only the owner can turn on versioing on a bucket" ); - - S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketVersioning, bucketName ); - if (PolicyAccess.DENY == S3Engine.verifyPolicy( context )) { - response.setStatus(403); - return; - } + if (null == bucketName) { + logger.error("executePutBucketVersioning - no bucket name given"); + response.setStatus(400); + return; + } - - if (versioningStatus.equalsIgnoreCase( "Enabled" )) sbucket.setVersioningStatus( 1 ); - else if (versioningStatus.equalsIgnoreCase( "Suspended")) sbucket.setVersioningStatus( 2 ); - else { - logger.error( "executePutBucketVersioning - unknown state: [" + versioningStatus + "]" ); - response.setStatus( 400 ); - return; - } - bucketDao.update( sbucket ); - - } catch( PermissionDeniedException e ) { - logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e); - throw e; - - } catch( Exception e ) { - logger.error( "executePutBucketVersioning - failed due to " + e.getMessage(), e); - response.setStatus(500); - return; - } - response.setStatus(200); + // -> is the XML as defined? + try { + DocumentBuilder db = dbf.newDocumentBuilder(); + Document restXML = db.parse(request.getInputStream()); + NodeList match = S3RestServlet.getElement(restXML, + "http://s3.amazonaws.com/doc/2006-03-01/", "Status"); + if (0 < match.getLength()) { + item = match.item(0); + versioningStatus = new String(item.getFirstChild() + .getNodeValue()); + } else { + logger.error("executePutBucketVersioning - cannot find Status tag in XML body"); + response.setStatus(400); + return; + } + } catch (Exception e) { + logger.error( + "executePutBucketVersioning - failed to parse XML due to " + + e.getMessage(), e); + response.setStatus(400); + return; + } + + try { + // Irrespective of what the ACLs say only the owner can turn on + // versioning on a bucket. + // The bucket owner may want to restrict the IP address from which + // this can occur. 
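For reference while reading the owner and policy checks in the rewritten code below: the bucket-policy handlers earlier in this file map the three policy-evaluation outcomes onto HTTP statuses. A minimal self-contained sketch of that mapping as executeGetBucketPolicy applies it (200 is shorthand for "carry on with the request", not a literal status set at that point):

    // ALLOW falls through, DEFAULT_DENY falls back to an owner check (405 for non-owners),
    // and an explicit DENY is answered with 403.
    public class PolicyOutcomeSketch {
        enum PolicyAccess { ALLOW, DEFAULT_DENY, DENY }

        static int toHttpStatus(PolicyAccess access, boolean callerIsOwner) {
            switch (access) {
            case ALLOW:
                return 200;
            case DEFAULT_DENY:
                return callerIsOwner ? 200 : 405;
            case DENY:
            default:
                return 403;
            }
        }

        public static void main(String[] args) {
            System.out.println(toHttpStatus(PolicyAccess.DEFAULT_DENY, false)); // 405
            System.out.println(toHttpStatus(PolicyAccess.DENY, true));          // 403
        }
    }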
+ + SBucketVO sbucket = bucketDao.getByName(bucketName); + + String client = UserContext.current().getCanonicalUserId(); + if (!client.equals(sbucket.getOwnerCanonicalId())) + throw new PermissionDeniedException( + "Access Denied - only the owner can turn on versioing on a bucket"); + + S3PolicyContext context = new S3PolicyContext( + PolicyActions.PutBucketVersioning, bucketName); + if (PolicyAccess.DENY == S3Engine.verifyPolicy(context)) { + response.setStatus(403); + return; + } + + if (versioningStatus.equalsIgnoreCase("Enabled")) + sbucket.setVersioningStatus(1); + else if (versioningStatus.equalsIgnoreCase("Suspended")) + sbucket.setVersioningStatus(2); + else { + logger.error("executePutBucketVersioning - unknown state: [" + + versioningStatus + "]"); + response.setStatus(400); + return; + } + bucketDao.update(sbucket.getId(), sbucket); + + } catch (PermissionDeniedException e) { + logger.error( + "executePutBucketVersioning - failed due to " + + e.getMessage(), e); + throw e; + + } catch (Exception e) { + logger.error( + "executePutBucketVersioning - failed due to " + + e.getMessage(), e); + response.setStatus(500); + return; + } + response.setStatus(200); } public void executePutBucketLogging(HttpServletRequest request, HttpServletResponse response) throws IOException { @@ -949,7 +978,7 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes public void executePutBucketWebsite(HttpServletRequest request, HttpServletResponse response) throws IOException { // TODO -- LoPri - Undertake checks on Put Bucket Website // Tested using configuration \nAllowOverride FileInfo AuthConfig Limit... in httpd.conf - // Need some way of using AllowOverride to allow use of .htaccess and then pushing .httaccess file to bucket subdirectory of mount point + // Need some way of using AllowOverride to allow use of .htaccess and then pushing .httaccess file to bucket subdirectory of mount point // Currently has noop effect in the sense that a running apachectl process sees the directory contents without further action response.setStatus(200); } @@ -976,128 +1005,145 @@ private void executeMultiObjectDelete(HttpServletRequest request, HttpServletRes public void executeListMultipartUploads(HttpServletRequest request, HttpServletResponse response) throws IOException { // [A] Obtain parameters and do basic bucket verification - String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); - String delimiter = request.getParameter("delimiter"); - String keyMarker = request.getParameter("key-marker"); - String prefix = request.getParameter("prefix"); - int maxUploads = 1000; - int nextUploadId = 0; - String nextKey = null; - boolean isTruncated = false; - S3MultipartUpload[] uploads = null; - S3MultipartUpload onePart = null; - - String temp = request.getParameter("max-uploads"); - if (null != temp) { - maxUploads = Integer.parseInt( temp ); - if (maxUploads > 1000 || maxUploads < 0) maxUploads = 1000; - } - - // -> upload-id-marker is ignored unless key-marker is also specified - String uploadIdMarker = request.getParameter("upload-id-marker"); - if (null == keyMarker) uploadIdMarker = null; - - // -> does the bucket exist, we may need it to verify access permissions - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); - if (bucket == null) { - logger.error( "listMultipartUpload failed since " + bucketName + " does not exist" ); - response.setStatus(404); - return; + String bucketName = (String) request + 
.getAttribute(S3Constants.BUCKET_ATTR_KEY); + String delimiter = request.getParameter("delimiter"); + String keyMarker = request.getParameter("key-marker"); + String prefix = request.getParameter("prefix"); + int maxUploads = 1000; + int nextUploadId = 0; + String nextKey = null; + boolean isTruncated = false; + S3MultipartUpload[] uploads = null; + S3MultipartUpload onePart = null; + String temp = request.getParameter("max-uploads"); + if (null != temp) { + maxUploads = Integer.parseInt(temp); + if (maxUploads > 1000 || maxUploads < 0) + maxUploads = 1000; + } + + // -> upload-id-marker is ignored unless key-marker is also specified + String uploadIdMarker = request.getParameter("upload-id-marker"); + if (null == keyMarker) + uploadIdMarker = null; + + // -> does the bucket exist, we may need it to verify access permissions + SBucketVO bucket = bucketDao.getByName(bucketName); + if (bucket == null) { + logger.error("listMultipartUpload failed since " + bucketName + + " does not exist"); + response.setStatus(404); + return; + } + + S3PolicyContext context = new S3PolicyContext( + PolicyActions.ListBucketMultipartUploads, bucketName); + context.setEvalParam(ConditionKeys.Prefix, prefix); + context.setEvalParam(ConditionKeys.Delimiter, delimiter); + S3Engine.verifyAccess(context, "SBucket", bucket.getId(), + SAcl.PERMISSION_READ); + + // [B] Query the multipart table to get the list of current uploads + try { + MultipartLoadDao uploadDao = new MultipartLoadDao(); + OrderedPair result = uploadDao + .getInitiatedUploads(bucketName, maxUploads, prefix, + keyMarker, uploadIdMarker); + uploads = result.getFirst(); + isTruncated = result.getSecond().booleanValue(); + } catch (Exception e) { + logger.error( + "List Multipart Uploads failed due to " + e.getMessage(), e); + response.setStatus(500); + } + + StringBuffer xml = new StringBuffer(); + xml.append(""); + xml.append(""); + xml.append("").append(bucketName).append(""); + xml.append("").append((null == keyMarker ? "" : keyMarker)) + .append(""); + xml.append("") + .append((null == uploadIdMarker ? "" : uploadIdMarker)) + .append(""); + + // [C] Construct the contents of the element + StringBuffer partsList = new StringBuffer(); + for (int i = 0; i < uploads.length; i++) { + onePart = uploads[i]; + if (null == onePart) + break; + + if (delimiter != null && !delimiter.isEmpty()) { + // -> is this available only in the CommonPrefixes element? 
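The delimiter branch that follows decides whether an upload key is reported as its own Upload entry or rolled up into a CommonPrefixes group. StringHelper.substringInBetween() is not shown in this patch, so the sketch below substitutes an assumed equivalent purely to make the grouping behaviour concrete.

    import java.util.LinkedHashSet;
    import java.util.Set;

    // Sketch of the prefix/delimiter grouping; substringInBetween() is an assumed stand-in
    // for StringHelper.substringInBetween(), whose body is not part of this patch.
    public class CommonPrefixSketch {
        static String substringInBetween(String key, String prefix, String delimiter) {
            String rest = (prefix != null && key.startsWith(prefix))
                    ? key.substring(prefix.length()) : key;
            int at = rest.indexOf(delimiter);
            return at < 0 ? null : rest.substring(0, at);  // null -> listed as an individual upload
        }

        public static void main(String[] args) {
            String prefix = "photos/", delimiter = "/";
            String[] keys = { "photos/2012/a.jpg", "photos/2012/b.jpg", "photos/readme.txt" };
            Set<String> commonPrefixes = new LinkedHashSet<String>();
            for (String key : keys) {
                String sub = substringInBetween(key, prefix, delimiter);
                if (sub != null)
                    commonPrefixes.add(prefix + sub + delimiter);  // rolled up under a common prefix
                else
                    System.out.println("upload entry: " + key);
            }
            System.out.println("common prefixes: " + commonPrefixes);
        }
    }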
+ if (StringHelper.substringInBetween(onePart.getKey(), prefix, + delimiter) != null) + continue; + } + + nextKey = onePart.getKey(); + nextUploadId = onePart.getId(); + partsList.append(""); + partsList.append("").append(nextKey).append(""); + partsList.append("").append(nextUploadId) + .append(""); + partsList.append(""); + partsList.append("").append(onePart.getAccessKey()) + .append(""); + partsList.append(""); + partsList.append(""); + partsList.append(""); + partsList.append("").append(onePart.getAccessKey()) + .append(""); + partsList.append(""); + partsList.append(""); + partsList.append("STANDARD"); + partsList + .append("") + .append(DatatypeConverter.printDateTime(onePart + .getLastModified())).append(""); + partsList.append(""); + } + + // [D] Construct the contents of the elements (if any) + for (int i = 0; i < uploads.length; i++) { + onePart = uploads[i]; + if (null == onePart) + break; + + if (delimiter != null && !delimiter.isEmpty()) { + String subName = StringHelper.substringInBetween( + onePart.getKey(), prefix, delimiter); + if (subName != null) { + partsList.append(""); + partsList.append(""); + if (prefix != null && prefix.length() > 0) + partsList.append(prefix + delimiter + subName); + else + partsList.append(subName); + partsList.append(""); + partsList.append(""); } - - S3PolicyContext context = new S3PolicyContext( PolicyActions.ListBucketMultipartUploads, bucketName ); - context.setEvalParam( ConditionKeys.Prefix, prefix ); - context.setEvalParam( ConditionKeys.Delimiter, delimiter ); - S3Engine.verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_READ ); + } + } - - // [B] Query the multipart table to get the list of current uploads - try { - MultipartLoadDao uploadDao = new MultipartLoadDao(); - OrderedPair result = uploadDao.getInitiatedUploads( bucketName, maxUploads, prefix, keyMarker, uploadIdMarker ); - uploads = result.getFirst(); - isTruncated = result.getSecond().booleanValue(); - } - catch( Exception e ) { - logger.error("List Multipart Uploads failed due to " + e.getMessage(), e); - response.setStatus(500); - } + // [D] Finish off the response + xml.append("").append((null == nextKey ? "" : nextKey)) + .append(""); + xml.append("") + .append((0 == nextUploadId ? "" : nextUploadId)) + .append(""); + xml.append("").append(maxUploads).append(""); + xml.append("").append(isTruncated) + .append(""); - StringBuffer xml = new StringBuffer(); - xml.append( "" ); - xml.append( "" ); - xml.append( "" ).append( bucketName ).append( "" ); - xml.append( "").append((null == keyMarker ? "" : keyMarker)).append( "" ); - xml.append( "").append((null == uploadIdMarker ? "" : uploadIdMarker)).append( "" ); - - - // [C] Construct the contents of the element - StringBuffer partsList = new StringBuffer(); - for( int i=0; i < uploads.length; i++ ) - { - onePart = uploads[i]; - if (null == onePart) break; - - if (delimiter != null && !delimiter.isEmpty()) - { - // -> is this available only in the CommonPrefixes element? 
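The Initiated timestamp in the listing assembled above is rendered with javax.xml.bind.DatatypeConverter, which prints a Calendar as an xsd:dateTime (ISO-8601) string. A minimal standalone check of that formatting; the exact zone suffix in the output depends on the JAXB implementation and the calendar's time zone.

    import java.util.Calendar;
    import java.util.TimeZone;
    import javax.xml.bind.DatatypeConverter;

    public class InitiatedTimestampSketch {
        public static void main(String[] args) {
            Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
            cal.set(2012, Calendar.AUGUST, 31, 19, 57, 51);
            cal.set(Calendar.MILLISECOND, 0);
            // Prints an xsd:dateTime such as 2012-08-31T19:57:51.000Z
            System.out.println(DatatypeConverter.printDateTime(cal));
        }
    }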
- if (StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter) != null) - continue; - } - - nextKey = onePart.getKey(); - nextUploadId = onePart.getId(); - partsList.append( "" ); - partsList.append( "" ).append( nextKey ).append( "" ); - partsList.append( "" ).append( nextUploadId ).append( "" ); - partsList.append( "" ); - partsList.append( "" ).append( onePart.getAccessKey()).append( "" ); - partsList.append( "" ); - partsList.append( "" ); - partsList.append( "" ); - partsList.append( "" ).append( onePart.getAccessKey()).append( "" ); - partsList.append( "" ); - partsList.append( "" ); - partsList.append( "STANDARD" ); - partsList.append( "" ).append( DatatypeConverter.printDateTime( onePart.getLastModified())).append( "" ); - partsList.append( "" ); - } - - // [D] Construct the contents of the elements (if any) - for( int i=0; i < uploads.length; i++ ) - { - onePart = uploads[i]; - if (null == onePart) break; + xml.append(partsList.toString()); + xml.append(""); - if (delimiter != null && !delimiter.isEmpty()) - { - String subName = StringHelper.substringInBetween(onePart.getKey(), prefix, delimiter); - if (subName != null) - { - partsList.append( "" ); - partsList.append( "" ); - if ( prefix != null && prefix.length() > 0 ) - partsList.append( prefix + delimiter + subName ); - else partsList.append( subName ); - partsList.append( "" ); - partsList.append( "" ); - } - } - } - - // [D] Finish off the response - xml.append( "" ).append((null == nextKey ? "" : nextKey)).append( "" ); - xml.append( "" ).append((0 == nextUploadId ? "" : nextUploadId)).append( "" ); - xml.append( "" ).append( maxUploads ).append( "" ); - xml.append( "" ).append( isTruncated ).append( "" ); - - xml.append( partsList.toString()); - xml.append( "" ); - - response.setStatus(200); - response.setContentType("text/xml; charset=UTF-8"); - S3RestServlet.endResponse(response, xml.toString()); + response.setStatus(200); + response.setContentType("text/xml; charset=UTF-8"); + S3RestServlet.endResponse(response, xml.toString()); } private String streamToString( InputStream is ) throws IOException diff --git a/awsapi/src/com/cloud/bridge/service/controller/s3/S3ObjectAction.java b/awsapi/src/com/cloud/bridge/service/controller/s3/S3ObjectAction.java index 8fb89f9de4e..ee4cec65e41 100644 --- a/awsapi/src/com/cloud/bridge/service/controller/s3/S3ObjectAction.java +++ b/awsapi/src/com/cloud/bridge/service/controller/s3/S3ObjectAction.java @@ -45,9 +45,12 @@ import com.amazon.s3.CopyObjectResponse; import com.amazon.s3.GetObjectAccessControlPolicyResponse; import com.cloud.bridge.io.MTOMAwareResultStreamWriter; import com.cloud.bridge.model.SAcl; +import com.cloud.bridge.model.SAclVO; import com.cloud.bridge.model.SBucket; +import com.cloud.bridge.model.SBucketVO; import com.cloud.bridge.persist.dao.MultipartLoadDao; import com.cloud.bridge.persist.dao.SBucketDao; +import com.cloud.bridge.persist.dao.SBucketDaoImpl; import com.cloud.bridge.service.S3Constants; import com.cloud.bridge.service.S3RestServlet; import com.cloud.bridge.service.UserContext; @@ -79,9 +82,11 @@ import com.cloud.bridge.util.DateHelper; import com.cloud.bridge.util.HeaderParam; import com.cloud.bridge.util.ServletRequestDataSource; import com.cloud.bridge.util.OrderedPair; +import com.cloud.utils.component.ComponentLocator; public class S3ObjectAction implements ServletAction { protected final static Logger logger = Logger.getLogger(S3ObjectAction.class); + protected final SBucketDao bucketDao = 
ComponentLocator.inject(SBucketDaoImpl.class); private DocumentBuilderFactory dbf = null; @@ -273,8 +278,7 @@ public class S3ObjectAction implements ServletAction { String bucketName = (String)request.getAttribute(S3Constants.BUCKET_ATTR_KEY); String key = (String)request.getAttribute(S3Constants.OBJECT_ATTR_KEY); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName( bucketName ); + SBucketVO bucket = bucketDao.getByName( bucketName ); String owner = null; if ( null != bucket ) owner = bucket.getOwnerCanonicalId(); @@ -296,7 +300,7 @@ public class S3ObjectAction implements ServletAction { S3AccessControlList aclRequest = new S3AccessControlList(); String aclRequestString = request.getHeader("x-amz-acl"); - OrderedPair accessControlsForObjectOwner = SAcl.getCannedAccessControls(aclRequestString,"SObject"); + OrderedPair accessControlsForObjectOwner = SAclVO.getCannedAccessControls(aclRequestString,"SObject"); grantRequest.setPermission(accessControlsForObjectOwner.getFirst()); grantRequest.setGrantee(accessControlsForObjectOwner.getSecond()); grantRequest.setCanonicalUserID(owner); @@ -484,6 +488,11 @@ public class S3ObjectAction implements ServletAction { S3GetObjectResponse engineResponse = ServiceProvider.getInstance().getS3Engine().handleRequest( engineRequest ); response.setStatus( engineResponse.getResultCode()); + + //bucket lookup for non-existance key + + if ( engineResponse.getResultCode() == 404 ) + return; String deleteMarker = engineResponse.getDeleteMarker(); if ( null != deleteMarker ) { @@ -891,8 +900,7 @@ public class S3ObjectAction implements ServletAction { // -> does the bucket exist, we may need it to verify access permissions - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); + SBucketVO bucket = bucketDao.getByName(bucketName); if (bucket == null) { logger.error( "listUploadParts failed since " + bucketName + " does not exist" ); response.setStatus(404); diff --git a/awsapi/src/com/cloud/bridge/service/controller/s3/ServiceProvider.java b/awsapi/src/com/cloud/bridge/service/controller/s3/ServiceProvider.java index c8ca2bd5351..2f1791e19a7 100644 --- a/awsapi/src/com/cloud/bridge/service/controller/s3/ServiceProvider.java +++ b/awsapi/src/com/cloud/bridge/service/controller/s3/ServiceProvider.java @@ -35,18 +35,19 @@ import java.util.TimerTask; import org.apache.axis2.AxisFault; import org.apache.log4j.Logger; import org.apache.log4j.xml.DOMConfigurator; -import org.hibernate.SessionException; import com.amazon.s3.AmazonS3SkeletonInterface; import com.amazon.ec2.AmazonEC2SkeletonInterface; -import com.cloud.bridge.model.MHost; +import com.cloud.bridge.model.MHostVO; import com.cloud.bridge.model.SHost; -import com.cloud.bridge.model.UserCredentials; -import com.cloud.bridge.persist.PersistContext; -import com.cloud.bridge.persist.PersistException; +import com.cloud.bridge.model.SHostVO; +import com.cloud.bridge.model.UserCredentialsVO; import com.cloud.bridge.persist.dao.MHostDao; +import com.cloud.bridge.persist.dao.MHostDaoImpl; import com.cloud.bridge.persist.dao.SHostDao; +import com.cloud.bridge.persist.dao.SHostDaoImpl; import com.cloud.bridge.persist.dao.UserCredentialsDao; +import com.cloud.bridge.persist.dao.UserCredentialsDaoImpl; import com.cloud.bridge.service.EC2SoapServiceImpl; import com.cloud.bridge.service.UserInfo; import com.cloud.bridge.service.core.ec2.EC2Engine; @@ -57,17 +58,23 @@ import com.cloud.bridge.util.ConfigurationHelper; import com.cloud.bridge.util.DateHelper; 
import com.cloud.bridge.util.NetHelper; import com.cloud.bridge.util.OrderedPair; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.Transaction; public class ServiceProvider { protected final static Logger logger = Logger.getLogger(ServiceProvider.class); - + protected final MHostDao mhostDao = ComponentLocator.inject(MHostDaoImpl.class); + protected final SHostDao shostDao = ComponentLocator.inject(SHostDaoImpl.class); + protected final UserCredentialsDao ucDao = ComponentLocator.inject(UserCredentialsDaoImpl.class); + public final static long HEARTBEAT_INTERVAL = 10000; private static ServiceProvider instance; private Map, Object> serviceMap = new HashMap, Object>(); private Timer timer = new Timer(); - private MHost mhost; + private MHostVO mhost; private Properties properties; private boolean useSubDomain = false; // use DNS sub domain for bucket name private String serviceEndpoint = null; @@ -81,6 +88,8 @@ public class ServiceProvider { protected ServiceProvider() throws IOException { // register service implementation object + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + txn.close(); engine = new S3Engine(); EC2_engine = new EC2Engine(); serviceMap.put(AmazonS3SkeletonInterface.class, new S3SerializableServiceImplementation(engine)); @@ -93,11 +102,9 @@ public class ServiceProvider { try { instance = new ServiceProvider(); instance.initialize(); - PersistContext.commitTransaction(); } catch(Throwable e) { logger.error("Unexpected exception " + e.getMessage(), e); } finally { - PersistContext.closeSession(); } } return instance; @@ -172,27 +179,34 @@ public class ServiceProvider { return properties; } - public UserInfo getUserInfo(String accessKey) - throws InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException { + public UserInfo getUserInfo(String accessKey) { UserInfo info = new UserInfo(); - - UserCredentialsDao credentialDao = new UserCredentialsDao(); - UserCredentials cloudKeys = credentialDao.getByAccessKey( accessKey ); - if ( null == cloudKeys ) { - logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" ); - return null; - } else { - info.setAccessKey( accessKey ); - info.setSecretKey( cloudKeys.getSecretKey()); - info.setCanonicalUserId(accessKey); - info.setDescription( "S3 REST request" ); - return info; + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + try { + txn.start(); + UserCredentialsVO cloudKeys = ucDao.getByAccessKey( accessKey ); + if ( null == cloudKeys ) { + logger.debug( accessKey + " is not defined in the S3 service - call SetUserKeys" ); + return null; + } else { + info.setAccessKey( accessKey ); + info.setSecretKey( cloudKeys.getSecretKey()); + info.setCanonicalUserId(accessKey); + info.setDescription( "S3 REST request" ); + return info; + } + }finally { + txn.commit(); } } - + + @DB protected void initialize() { if(logger.isInfoEnabled()) logger.info("Initializing ServiceProvider..."); + + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + //txn.close(); File file = ConfigurationHelper.findConfigurationFile("log4j-cloud.xml"); if(file != null) { @@ -226,14 +240,16 @@ public class ServiceProvider { setupHost(hostKey, host); // we will commit and start a new transaction to allow host info be flushed to DB - PersistContext.flush(); + //PersistContext.flush(); String localStorageRoot = properties.getProperty("storage.root"); if (localStorageRoot != null) setupLocalStorage(localStorageRoot); multipartDir = 
properties.getProperty("storage.multipartDir"); - + + Transaction txn1 = Transaction.open(Transaction.AWSAPI_DB); timer.schedule(getHeartbeatTask(), HEARTBEAT_INTERVAL, HEARTBEAT_INTERVAL); + txn1.close(); if(logger.isInfoEnabled()) logger.info("ServiceProvider initialized"); @@ -264,45 +280,41 @@ public class ServiceProvider { @Override public void run() { try { - MHostDao mhostDao = new MHostDao(); mhost.setLastHeartbeatTime(DateHelper.currentGMTTime()); - mhostDao.update(mhost); - PersistContext.commitTransaction(); + mhostDao.updateHeartBeat(mhost); } catch(Throwable e){ logger.error("Unexpected exception " + e.getMessage(), e); } finally { - PersistContext.closeSession(); } } }; } private void setupHost(String hostKey, String host) { - MHostDao mhostDao = new MHostDao(); - mhost = mhostDao.getByHostKey(hostKey); + + mhost = mhostDao.getByHostKey(hostKey); if(mhost == null) { - mhost = new MHost(); + mhost = new MHostVO(); mhost.setHostKey(hostKey); mhost.setHost(host); mhost.setLastHeartbeatTime(DateHelper.currentGMTTime()); - mhostDao.save(mhost); + mhost = mhostDao.persist(mhost); } else { mhost.setHost(host); - mhostDao.update(mhost); + mhostDao.update(mhost.getId(), mhost); } } private void setupLocalStorage(String storageRoot) { - SHostDao shostDao = new SHostDao(); - SHost shost = shostDao.getLocalStorageHost(mhost.getId(), storageRoot); + SHostVO shost = shostDao.getLocalStorageHost(mhost.getId(), storageRoot); if(shost == null) { - shost = new SHost(); + shost = new SHostVO(); shost.setMhost(mhost); - mhost.getLocalSHosts().add(shost); + shost.setMhostid(mhost.getId()); shost.setHostType(SHost.STORAGE_HOST_TYPE_LOCAL); shost.setHost(NetHelper.getHostName()); shost.setExportRoot(storageRoot); - PersistContext.getSession().save(shost); + shostDao.persist(shost); } } @@ -318,35 +330,36 @@ public class ServiceProvider { return (T) Proxy.newProxyInstance(serviceObject.getClass().getClassLoader(), new Class[] { serviceInterface }, new InvocationHandler() { - public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { - Object result = null; - try { - result = method.invoke(serviceObject, args); - PersistContext.commitTransaction(); - PersistContext.commitTransaction(true); - } catch (PersistException e) { - } catch (SessionException e) { - } catch(Throwable e) { - // Rethrow the exception to Axis: - // Check if the exception is an AxisFault or a RuntimeException - // enveloped AxisFault and if so, pass it on as such. Otherwise - // log to help debugging and throw as is. - if (e.getCause() != null && e.getCause() instanceof AxisFault) - throw e.getCause(); - else if (e.getCause() != null && e.getCause().getCause() != null - && e.getCause().getCause() instanceof AxisFault) - throw e.getCause().getCause(); - else { - logger.warn("Unhandled exception " + e.getMessage(), e); - throw e; - } - } finally { - PersistContext.closeSession(); - PersistContext.closeSession(true); - } - return result; - } - }); + public Object invoke(Object proxy, Method method, + Object[] args) throws Throwable { + Object result = null; + try { + result = method.invoke(serviceObject, args); + } catch (Throwable e) { + // Rethrow the exception to Axis: + // Check if the exception is an AxisFault or a + // RuntimeException + // enveloped AxisFault and if so, pass it on as + // such. Otherwise + // log to help debugging and throw as is. 
+ if (e.getCause() != null + && e.getCause() instanceof AxisFault) + throw e.getCause(); + else if (e.getCause() != null + && e.getCause().getCause() != null + && e.getCause().getCause() instanceof AxisFault) + throw e.getCause().getCause(); + else { + logger.warn( + "Unhandled exception " + e.getMessage(), + e); + throw e; + } + } finally { + } + return result; + } + }); } @SuppressWarnings("unchecked") diff --git a/awsapi/src/com/cloud/bridge/service/core/ec2/EC2Engine.java b/awsapi/src/com/cloud/bridge/service/core/ec2/EC2Engine.java index 2d40381cc03..eb25249bd92 100644 --- a/awsapi/src/com/cloud/bridge/service/core/ec2/EC2Engine.java +++ b/awsapi/src/com/cloud/bridge/service/core/ec2/EC2Engine.java @@ -22,6 +22,9 @@ import java.io.FileNotFoundException; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.security.SignatureException; +import java.sql.Connection; +import java.sql.PreparedStatement; +import java.sql.ResultSet; import java.sql.SQLException; import java.text.ParseException; import java.util.ArrayList; @@ -32,12 +35,15 @@ import java.util.UUID; import javax.xml.parsers.ParserConfigurationException; import org.apache.log4j.Logger; -import org.hibernate.ejb.criteria.expression.UnaryArithmeticOperation.Operation; import org.xml.sax.SAXException; -import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDao; +import com.cloud.bridge.model.CloudStackServiceOfferingVO; import com.cloud.bridge.persist.dao.CloudStackAccountDao; -import com.cloud.bridge.persist.dao.OfferingDao; +import com.cloud.bridge.persist.dao.CloudStackAccountDaoImpl; +import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDao; +import com.cloud.bridge.persist.dao.CloudStackSvcOfferingDaoImpl; +import com.cloud.bridge.persist.dao.OfferingDaoImpl; +import com.cloud.bridge.persist.dao.SObjectItemDaoImpl; import com.cloud.bridge.service.UserContext; import com.cloud.bridge.service.core.ec2.EC2ImageAttributes.ImageAttribute; @@ -70,6 +76,8 @@ import com.cloud.stack.models.CloudStackUser; import com.cloud.stack.models.CloudStackUserVm; import com.cloud.stack.models.CloudStackVolume; import com.cloud.stack.models.CloudStackZone; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.Transaction; /** * EC2Engine processes the ec2 commands and calls their cloudstack analogs @@ -80,6 +88,9 @@ public class EC2Engine { String managementServer = null; String cloudAPIPort = null; + protected final CloudStackSvcOfferingDao scvoDao = ComponentLocator.inject(CloudStackSvcOfferingDaoImpl.class); + protected final OfferingDaoImpl ofDao = ComponentLocator.inject(OfferingDaoImpl.class); + CloudStackAccountDao accDao = ComponentLocator.inject(CloudStackAccountDaoImpl.class); private CloudStackApi _eng = null; private CloudStackAccount currentAccount = null; @@ -110,7 +121,6 @@ public class EC2Engine { managementServer = EC2Prop.getProperty( "managementServer" ); cloudAPIPort = EC2Prop.getProperty( "cloudAPIPort", null ); - OfferingDao ofDao = new OfferingDao(); try { if(ofDao.getOfferingCount() == 0) { String strValue = EC2Prop.getProperty("m1.small.serviceId"); @@ -1469,7 +1479,7 @@ public class EC2Engine { if(request.getInstanceType() != null){ instanceType = request.getInstanceType(); } - CloudStackServiceOffering svcOffering = getCSServiceOfferingId(instanceType); + CloudStackServiceOfferingVO svcOffering = getCSServiceOfferingId(instanceType); if(svcOffering == null){ logger.info("No ServiceOffering found to be defined by name, please contact the administrator 
"+instanceType ); throw new EC2ServiceException(ClientError.Unsupported, "instanceType: [" + instanceType + "] not found!"); @@ -1779,12 +1789,11 @@ public class EC2Engine { * */ - private CloudStackServiceOffering getCSServiceOfferingId(String instanceType){ + private CloudStackServiceOfferingVO getCSServiceOfferingId(String instanceType){ try { - if (null == instanceType) instanceType = "m1.small"; + if (null == instanceType) instanceType = "m1.small"; - CloudStackSvcOfferingDao dao = new CloudStackSvcOfferingDao(); - return dao.getSvcOfferingByName(instanceType); + return scvoDao.getSvcOfferingByName(instanceType); } catch(Exception e) { logger.error( "Error while retrieving ServiceOffering information by name - ", e); @@ -1802,8 +1811,8 @@ public class EC2Engine { */ private String serviceOfferingIdToInstanceType( String serviceOfferingId ){ try{ - CloudStackSvcOfferingDao dao = new CloudStackSvcOfferingDao(); - CloudStackServiceOffering offering = dao.getSvcOfferingById(serviceOfferingId); + + CloudStackServiceOfferingVO offering = scvoDao.getSvcOfferingById(serviceOfferingId); //dao.getSvcOfferingById(serviceOfferingId); if(offering == null){ logger.warn( "No instanceType match for serviceOfferingId: [" + serviceOfferingId + "]" ); return "m1.small"; @@ -2260,9 +2269,7 @@ public class EC2Engine { */ private String getDefaultZoneId(String accountId) { try { - CloudStackAccountDao dao = new CloudStackAccountDao(); - CloudStackAccount account = dao.getdefaultZoneId(accountId); - return account.getDefaultZoneId(); + return accDao.getDefaultZoneId(accountId); } catch(Exception e) { logger.error( "Error while retrieving Account information by id - ", e); throw new EC2ServiceException(ServerError.InternalError, e.getMessage()); diff --git a/awsapi/src/com/cloud/bridge/service/core/s3/S3Engine.java b/awsapi/src/com/cloud/bridge/service/core/s3/S3Engine.java index e8b73a42439..a117d133569 100644 --- a/awsapi/src/com/cloud/bridge/service/core/s3/S3Engine.java +++ b/awsapi/src/com/cloud/bridge/service/core/s3/S3Engine.java @@ -36,31 +36,40 @@ import java.util.UUID; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Logger; -import org.hibernate.LockMode; -import org.hibernate.Session; import org.json.simple.parser.ParseException; import com.cloud.bridge.io.S3FileSystemBucketAdapter; -import com.cloud.bridge.model.MHost; -import com.cloud.bridge.model.MHostMount; +import com.cloud.bridge.model.BucketPolicyVO; +import com.cloud.bridge.model.MHostMountVO; +import com.cloud.bridge.model.MHostVO; import com.cloud.bridge.model.SAcl; +import com.cloud.bridge.model.SAclVO; import com.cloud.bridge.model.SBucket; +import com.cloud.bridge.model.SBucketVO; import com.cloud.bridge.model.SHost; -import com.cloud.bridge.model.SMeta; -import com.cloud.bridge.model.SObject; -import com.cloud.bridge.model.SObjectItem; -import com.cloud.bridge.persist.PersistContext; +import com.cloud.bridge.model.SHostVO; +import com.cloud.bridge.model.SMetaVO; +import com.cloud.bridge.model.SObjectVO; +import com.cloud.bridge.model.SObjectItemVO; import com.cloud.bridge.persist.dao.BucketPolicyDao; +import com.cloud.bridge.persist.dao.BucketPolicyDaoImpl; import com.cloud.bridge.persist.dao.MHostDao; +import com.cloud.bridge.persist.dao.MHostDaoImpl; import com.cloud.bridge.persist.dao.MHostMountDao; +import com.cloud.bridge.persist.dao.MHostMountDaoImpl; import com.cloud.bridge.persist.dao.MultipartLoadDao; import com.cloud.bridge.persist.dao.SAclDao; +import com.cloud.bridge.persist.dao.SAclDaoImpl; import 
com.cloud.bridge.persist.dao.SBucketDao; +import com.cloud.bridge.persist.dao.SBucketDaoImpl; import com.cloud.bridge.persist.dao.SHostDao; +import com.cloud.bridge.persist.dao.SHostDaoImpl; import com.cloud.bridge.persist.dao.SMetaDao; +import com.cloud.bridge.persist.dao.SMetaDaoImpl; import com.cloud.bridge.persist.dao.SObjectDao; +import com.cloud.bridge.persist.dao.SObjectDaoImpl; import com.cloud.bridge.persist.dao.SObjectItemDao; -import com.cloud.bridge.service.S3Constants; +import com.cloud.bridge.persist.dao.SObjectItemDaoImpl; import com.cloud.bridge.service.UserContext; import com.cloud.bridge.service.controller.s3.ServiceProvider; import com.cloud.bridge.service.core.s3.S3BucketPolicy.PolicyAccess; @@ -75,19 +84,31 @@ import com.cloud.bridge.service.exception.ObjectAlreadyExistsException; import com.cloud.bridge.service.exception.OutOfServiceException; import com.cloud.bridge.service.exception.OutOfStorageException; import com.cloud.bridge.service.exception.PermissionDeniedException; -import com.cloud.bridge.service.exception.UnsupportedException; import com.cloud.bridge.util.DateHelper; import com.cloud.bridge.util.PolicyParser; import com.cloud.bridge.util.StringHelper; import com.cloud.bridge.util.OrderedPair; import com.cloud.bridge.util.Triple; +import com.cloud.utils.component.ComponentLocator; +import com.cloud.utils.db.DB; +import com.cloud.utils.db.Transaction; /** * The CRUD control actions to be invoked from S3BucketAction or S3ObjectAction. */ public class S3Engine { protected final static Logger logger = Logger.getLogger(S3Engine.class); - + protected final SHostDao shostDao = ComponentLocator.inject(SHostDaoImpl.class); + protected final MHostDao mhostDao = ComponentLocator.inject(MHostDaoImpl.class); + protected final static BucketPolicyDao bPolicy = ComponentLocator.inject(BucketPolicyDaoImpl.class); + protected final BucketPolicyDao bPolicyDao = ComponentLocator.inject(BucketPolicyDaoImpl.class); + protected final SBucketDao bucketDao = ComponentLocator.inject(SBucketDaoImpl.class); + protected final SAclDao aclDao = ComponentLocator.inject(SAclDaoImpl.class); + protected final static SAclDao saclDao = ComponentLocator.inject(SAclDaoImpl.class); + protected final SObjectDao objectDao = ComponentLocator.inject(SObjectDaoImpl.class); + protected final SObjectItemDao itemDao = ComponentLocator.inject(SObjectItemDaoImpl.class); + protected final SMetaDao metaDao = ComponentLocator.inject(SMetaDaoImpl.class); + protected final MHostMountDao mountDao = ComponentLocator.inject(MHostMountDaoImpl.class); private final int LOCK_ACQUIRING_TIMEOUT_SECONDS = 10; // ten seconds private final Map bucketAdapters = new HashMap(); @@ -169,61 +190,53 @@ public class S3Engine { String cannedAccessPolicy = request.getCannedAccess(); String bucketName = request.getBucketName(); response.setBucketName( bucketName ); - + Transaction txn= null; verifyBucketName( bucketName, false ); S3PolicyContext context = new S3PolicyContext( PolicyActions.CreateBucket, bucketName ); context.setEvalParam( ConditionKeys.Acl, cannedAccessPolicy ); if (PolicyAccess.DENY == verifyPolicy( context )) throw new PermissionDeniedException( "Access Denied - bucket policy DENY result" ); - - if (PersistContext.acquireNamedLock("bucket.creation", LOCK_ACQUIRING_TIMEOUT_SECONDS)) - { - OrderedPair shost_storagelocation_pair = null; - boolean success = false; - try { - SBucketDao bucketDao = new SBucketDao(); - SAclDao aclDao = new SAclDao(); - - if (bucketDao.getByName(request.getBucketName()) != null) - throw 
new ObjectAlreadyExistsException("Bucket already exists"); - - shost_storagelocation_pair = allocBucketStorageHost(request.getBucketName(), null); - - SBucket sbucket = new SBucket(); - sbucket.setName(request.getBucketName()); - sbucket.setCreateTime(DateHelper.currentGMTTime()); - sbucket.setOwnerCanonicalId( UserContext.current().getCanonicalUserId()); - sbucket.setShost(shost_storagelocation_pair.getFirst()); - shost_storagelocation_pair.getFirst().getBuckets().add(sbucket); - bucketDao.save(sbucket); + OrderedPair shost_storagelocation_pair = null; + boolean success = false; + try { + txn = Transaction.open(Transaction.AWSAPI_DB); - S3AccessControlList acl = request.getAcl(); - - if ( null != cannedAccessPolicy ) - setCannedAccessControls( cannedAccessPolicy, "SBucket", sbucket.getId(), sbucket ); - else if (null != acl) - aclDao.save( "SBucket", sbucket.getId(), acl ); - else setSingleAcl( "SBucket", sbucket.getId(), SAcl.PERMISSION_FULL ); - - // explicitly commit the transaction - PersistContext.commitTransaction(); - success = true; - } - finally - { - if(!success && shost_storagelocation_pair != null) { - S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(shost_storagelocation_pair.getFirst()); - bucketAdapter.deleteContainer(shost_storagelocation_pair.getSecond(), request.getBucketName()); - } - PersistContext.rollbackTransaction(false); - PersistContext.releaseNamedLock("bucket.creation"); - } - - } else { - throw new OutOfServiceException("Unable to acquire synchronization lock"); - } - + if (bucketDao.getByName(request.getBucketName()) != null) + throw new ObjectAlreadyExistsException("Bucket already exists"); + + shost_storagelocation_pair = allocBucketStorageHost( + request.getBucketName(), null); + SBucketVO sbucket = new SBucketVO(request.getBucketName(), + DateHelper.currentGMTTime(), UserContext.current() + .getCanonicalUserId(), + shost_storagelocation_pair.getFirst()); + + shost_storagelocation_pair.getFirst().getBuckets().add(sbucket); + // bucketDao.save(sbucket); + sbucket = bucketDao.persist(sbucket); + S3AccessControlList acl = request.getAcl(); + + if (null != cannedAccessPolicy) + setCannedAccessControls(cannedAccessPolicy, "SBucket", + sbucket.getId(), sbucket); + else if (null != acl) + aclDao.save("SBucket", sbucket.getId(), acl); + else + setSingleAcl("SBucket", sbucket.getId(), SAcl.PERMISSION_FULL); + + success = true; + } finally { + if (!success && shost_storagelocation_pair != null) { + S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(shost_storagelocation_pair + .getFirst()); + bucketAdapter.deleteContainer( + shost_storagelocation_pair.getSecond(), + request.getBucketName()); + } + txn.rollback(); + txn.close(); + } return response; } @@ -234,37 +247,43 @@ public class S3Engine { public S3Response handleRequest( S3DeleteBucketRequest request ) { - S3Response response = new S3Response(); - SBucketDao bucketDao = new SBucketDao(); + S3Response response = new S3Response(); + // String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName(bucketName); + Transaction txn = null; if ( sbucket != null ) - { - S3PolicyContext context = new S3PolicyContext( PolicyActions.DeleteBucket, bucketName ); - switch( verifyPolicy( context )) - { - case ALLOW: - // The bucket policy can give users permission to delete a bucket whereas ACLs cannot - break; - - case DENY: - throw new PermissionDeniedException( "Access Denied - bucket policy DENY result" ); - - case 
DEFAULT_DENY: - default: - // Irrespective of what the ACLs say, only the owner can delete a bucket - String client = UserContext.current().getCanonicalUserId(); - if (!client.equals( sbucket.getOwnerCanonicalId())) { - throw new PermissionDeniedException( "Access Denied - only the owner can delete a bucket" ); - } - break; - } + { + txn = Transaction.open(Transaction.AWSAPI_DB); + txn.start(); + S3PolicyContext context = new S3PolicyContext( PolicyActions.DeleteBucket, bucketName ); + switch( verifyPolicy( context )) + { + case ALLOW: + // The bucket policy can give users permission to delete a + // bucket whereas ACLs cannot + break; - + case DENY: + throw new PermissionDeniedException( + "Access Denied - bucket policy DENY result"); + + case DEFAULT_DENY: + default: + // Irrespective of what the ACLs say, only the owner can delete + // a bucket + String client = UserContext.current().getCanonicalUserId(); + if (!client.equals(sbucket.getOwnerCanonicalId())) { + throw new PermissionDeniedException( + "Access Denied - only the owner can delete a bucket"); + } + break; + } + // Delete the file from its storage location - OrderedPair host_storagelocation_pair = getBucketStorageHost(sbucket); - S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); + OrderedPair host_storagelocation_pair = getBucketStorageHost(sbucket); + S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); bucketAdapter.deleteContainer(host_storagelocation_pair.getSecond(), request.getBucketName()); // Cascade-deleting can delete related SObject/SObjectItem objects, but not SAcl, SMeta and policy objects. @@ -272,35 +291,37 @@ public class S3Engine { // (1)Get all the objects in the bucket, // (2)then all the items in each object, // (3) then all meta & acl data for each item - Set objectsInBucket = sbucket.getObjectsInBucket(); - Iterator it = objectsInBucket.iterator(); + Set objectsInBucket = sbucket.getObjectsInBucket(); + Iterator it = objectsInBucket.iterator(); while( it.hasNext()) { - SObject oneObject = (SObject)it.next(); - Set itemsInObject = oneObject.getItems(); - Iterator is = itemsInObject.iterator(); + SObjectVO oneObject = (SObjectVO)it.next(); + Set itemsInObject = oneObject.getItems(); + Iterator is = itemsInObject.iterator(); while( is.hasNext()) { - SObjectItem oneItem = (SObjectItem)is.next(); - deleteMetaData( oneItem.getId()); - deleteObjectAcls( "SObjectItem", oneItem.getId()); + SObjectItemVO oneItem = (SObjectItemVO) is.next(); + deleteMetaData(oneItem.getId()); + deleteObjectAcls("SObjectItem", oneItem.getId()); } } // Delete all the policy state associated with the bucket try { - ServiceProvider.getInstance().deleteBucketPolicy( bucketName ); - BucketPolicyDao policyDao = new BucketPolicyDao(); - policyDao.deletePolicy( bucketName ); - } - catch( Exception e ) { - logger.error("When deleting a bucket we must try to delete its policy: ", e); + ServiceProvider.getInstance().deleteBucketPolicy(bucketName); + bPolicyDao.deletePolicy(bucketName); + } catch( Exception e ) { + logger.error("When deleting a bucket we must try to delete its policy: ", e); } deleteBucketAcls( sbucket.getId()); - bucketDao.delete( sbucket ); + bucketDao.remove(sbucket.getId()); + + response.setResultCode(204); response.setResultDescription("OK"); + + txn.close(); } else { response.setResultCode(404); @@ -327,10 +348,10 @@ public class S3Engine { int maxKeys = request.getMaxKeys(); if(maxKeys <= 0) maxKeys = 1000; - SBucketDao 
bucketDao = new SBucketDao(); - SBucket sbucket = bucketDao.getByName(bucketName); + // + SBucketVO sbucket = bucketDao.getByName(bucketName); if (sbucket == null) throw new NoSuchObjectException("Bucket " + bucketName + " does not exist"); - + PolicyActions action = (includeVersions ? PolicyActions.ListBucketVersions : PolicyActions.ListBucket); S3PolicyContext context = new S3PolicyContext( action, bucketName ); context.setEvalParam( ConditionKeys.MaxKeys, new String( "" + maxKeys )); @@ -340,23 +361,23 @@ public class S3Engine { // Wen execting the query, request one more item so that we know how to set isTruncated flag - SObjectDao sobjectDao = new SObjectDao(); - List l = null; + List l = null; if ( includeVersions ) - l = sobjectDao.listAllBucketObjects( sbucket, prefix, marker, maxKeys+1 ); - else l = sobjectDao.listBucketObjects( sbucket, prefix, marker, maxKeys+1 ); + l = objectDao.listAllBucketObjects( sbucket, prefix, marker, maxKeys+1 ); + else l = objectDao.listBucketObjects( sbucket, prefix, marker, maxKeys+1 ); response.setBucketName(bucketName); response.setMarker(marker); response.setMaxKeys(maxKeys); response.setPrefix(prefix); response.setDelimiter(delimiter); - response.setTruncated(l.size() > maxKeys); - if(l.size() > maxKeys) { - response.setNextMarker(l.get(l.size() - 1).getNameKey()); + if (null != l ) { + response.setTruncated(l.size() > maxKeys); + if(l.size() > maxKeys) { + response.setNextMarker(l.get(l.size() - 1).getNameKey()); + } } - // If needed - SOAP response does not support versioning response.setContents( composeListBucketContentEntries(l, prefix, delimiter, maxKeys, includeVersions, request.getVersionIdMarker())); response.setCommonPrefixes( composeListBucketPrefixEntries(l, prefix, delimiter, maxKeys)); @@ -372,10 +393,10 @@ public class S3Engine { public S3ListAllMyBucketsResponse handleRequest(S3ListAllMyBucketsRequest request) { S3ListAllMyBucketsResponse response = new S3ListAllMyBucketsResponse(); - SBucketDao bucketDao = new SBucketDao(); + // "...you can only list buckets for which you are the owner." 
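A little further up, the rewritten listBucketContents() keeps the existing "query one extra row" idiom: the object DAO is asked for maxKeys + 1 entries, and the extra row only signals truncation and supplies the next marker. A self-contained illustration of that idiom with plain strings standing in for SObjectVO rows:

    import java.util.Arrays;
    import java.util.List;

    public class TruncationSketch {
        public static void main(String[] args) {
            List<String> allKeys = Arrays.asList("a", "b", "c", "d", "e");
            int maxKeys = 2;

            // Stand-in for objectDao.listBucketObjects(bucket, prefix, marker, maxKeys + 1)
            List<String> l = allKeys.subList(0, Math.min(allKeys.size(), maxKeys + 1));

            boolean truncated = l.size() > maxKeys;              // extra row present?
            String nextMarker = truncated ? l.get(l.size() - 1) : null;
            List<String> page = truncated ? l.subList(0, maxKeys) : l;

            System.out.println("page=" + page + " truncated=" + truncated
                    + " nextMarker=" + nextMarker);
        }
    }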
- List buckets = bucketDao.listBuckets(UserContext.current().getCanonicalUserId()); + List buckets = bucketDao.listBuckets(UserContext.current().getCanonicalUserId()); S3CanonicalUser owner = new S3CanonicalUser(); owner.setID(UserContext.current().getCanonicalUserId()); owner.setDisplayName(""); @@ -385,7 +406,7 @@ public class S3Engine { { S3ListAllMyBucketsEntry[] entries = new S3ListAllMyBucketsEntry[buckets.size()]; int i = 0; - for(SBucket bucket : buckets) + for(SBucketVO bucket : buckets) { String bucketName = bucket.getName(); S3PolicyContext context = new S3PolicyContext( PolicyActions.ListAllMyBuckets, bucketName ); @@ -409,9 +430,8 @@ public class S3Engine { public S3Response handleRequest(S3SetBucketAccessControlPolicyRequest request) { S3Response response = new S3Response(); - SBucketDao bucketDao = new SBucketDao(); String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName(bucketName); + SBucketVO sbucket = bucketDao.getByName(bucketName); if(sbucket == null) { response.setResultCode(404); response.setResultDescription("Bucket does not exist"); @@ -421,7 +441,6 @@ public class S3Engine { S3PolicyContext context = new S3PolicyContext( PolicyActions.PutBucketAcl, bucketName ); verifyAccess( context, "SBucket", sbucket.getId(), SAcl.PERMISSION_WRITE_ACL ); - SAclDao aclDao = new SAclDao(); aclDao.save("SBucket", sbucket.getId(), request.getAcl()); response.setResultCode(200); @@ -438,9 +457,8 @@ public class S3Engine { public S3AccessControlPolicy handleRequest(S3GetBucketAccessControlPolicyRequest request) { S3AccessControlPolicy policy = new S3AccessControlPolicy(); - SBucketDao bucketDao = new SBucketDao(); String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName( bucketName ); if (sbucket == null) throw new NoSuchObjectException("Bucket " + bucketName + " does not exist"); @@ -452,8 +470,8 @@ public class S3Engine { S3PolicyContext context = new S3PolicyContext( PolicyActions.GetBucketAcl, bucketName ); verifyAccess( context, "SBucket", sbucket.getId(), SAcl.PERMISSION_READ_ACL ); - SAclDao aclDao = new SAclDao(); - List grants = aclDao.listGrants("SBucket", sbucket.getId()); + + List grants = aclDao.listGrants("SBucket", sbucket.getId()); policy.setGrants(S3Grant.toGrants(grants)); return policy; } @@ -464,61 +482,69 @@ public class S3Engine { * Called from S3ObjectAction when executing at completion or when aborting multipart upload. 
* @param bucketName * @param uploadId - * @param verifyPermission - If false then do not check the user's permission to clean up the state + * @param verifyPermissiod - If false then do not check the user's permission to clean up the state */ - public int freeUploadParts(String bucketName, int uploadId, boolean verifyPermission) - { - // -> we need to look up the final bucket to figure out which mount point to use to save the part in - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); - if (bucket == null) { - logger.error( "initiateMultipartUpload failed since " + bucketName + " does not exist" ); - return 404; - } + public int freeUploadParts(String bucketName, int uploadId, boolean verifyPermission) { - OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); - S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); + // -> we need to look up the final bucket to figure out which mount + // point to use to save the part in + // SBucketDao bucketDao = new SBucketDao(); + SBucketVO bucket = bucketDao.getByName(bucketName); + if (bucket == null) { + logger.error("initiateMultipartUpload failed since " + bucketName + + " does not exist"); + return 404; + } - try { - MultipartLoadDao uploadDao = new MultipartLoadDao(); - OrderedPair exists = uploadDao.multipartExits( uploadId ); - if (null == exists) { - logger.error( "initiateMultipartUpload failed since multipart upload" + uploadId + " does not exist" ); - return 404; - } - - // -> the multipart initiator or bucket owner can do this action by default - if (verifyPermission) - { - String initiator = uploadDao.getInitiator( uploadId ); - if (null == initiator || !initiator.equals( UserContext.current().getAccessKey())) - { - // -> write permission on a bucket allows a PutObject / DeleteObject action on any object in the bucket - S3PolicyContext context = new S3PolicyContext( PolicyActions.AbortMultipartUpload, bucketName ); - context.setKeyName( exists.getSecond()); - verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_WRITE ); - } - } + OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); + S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); - // -> first get a list of all the uploaded files and delete one by one - S3MultipartPart[] parts = uploadDao.getParts( uploadId, 10000, 0 ); - for( int i=0; i < parts.length; i++ ) - { - bucketAdapter.deleteObject( host_storagelocation_pair.getSecond(), ServiceProvider.getInstance().getMultipartDir(), parts[i].getPath()); - } - - uploadDao.deleteUpload( uploadId ); - return 204; + try { + MultipartLoadDao uploadDao = new MultipartLoadDao(); + OrderedPair exists = uploadDao.multipartExits(uploadId); + + if (null == exists) { + logger.error("initiateMultipartUpload failed since multipart upload" + + uploadId + " does not exist"); + return 404; + } + // -> the multipart initiator or bucket owner can do this action by + // default + if (verifyPermission) { + String initiator = uploadDao.getInitiator(uploadId); + if (null == initiator + || !initiator.equals(UserContext.current() + .getAccessKey())) { + // -> write permission on a bucket allows a PutObject / + // DeleteObject action on any object in the bucket + S3PolicyContext context = new S3PolicyContext( + PolicyActions.AbortMultipartUpload, bucketName); + context.setKeyName(exists.getSecond()); + verifyAccess(context, "SBucket", bucket.getId(), + SAcl.PERMISSION_WRITE); } - 
catch( PermissionDeniedException e ) { - logger.error("freeUploadParts failed due to [" + e.getMessage() + "]", e); - throw e; - } - catch (Exception e) { - logger.error("freeUploadParts failed due to [" + e.getMessage() + "]", e); - return 500; - } + } + + // -> first get a list of all the uploaded files and delete one by + // one + S3MultipartPart[] parts = uploadDao.getParts(uploadId, 10000, 0); + for (int i = 0; i < parts.length; i++) { + bucketAdapter.deleteObject(host_storagelocation_pair.getSecond(), ServiceProvider.getInstance() + .getMultipartDir(), parts[i].getPath()); + } + uploadDao.deleteUpload(uploadId); + return 204; + + } catch (PermissionDeniedException e) { + logger.error("freeUploadParts failed due to [" + e.getMessage() + + "]", e); + throw e; + } catch (Exception e) { + logger.error("freeUploadParts failed due to [" + e.getMessage() + + "]", e); + return 500; + } } /** @@ -534,8 +560,7 @@ public class S3Engine { String nameKey = request.getKey(); // -> does the bucket exist and can we write to it? - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); + SBucketVO bucket = bucketDao.getByName(bucketName); if (bucket == null) { logger.error( "initiateMultipartUpload failed since " + bucketName + " does not exist" ); response.setResultCode(404); @@ -577,8 +602,8 @@ public class S3Engine { String bucketName = request.getBucketName(); // -> we need to look up the final bucket to figure out which mount point to use to save the part in - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); + //SBucketDao bucketDao = new SBucketDao(); + SBucketVO bucket = bucketDao.getByName(bucketName); if (bucket == null) { logger.error( "saveUploadedPart failed since " + bucketName + " does not exist" ); response.setResultCode(404); @@ -587,20 +612,19 @@ public class S3Engine { context.setKeyName( request.getKey()); verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_WRITE ); - OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); + OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); String itemFileName = new String( uploadId + "-" + partNumber ); InputStream is = null; try { - is = request.getDataInputStream(); - String md5Checksum = bucketAdapter.saveObject(is, host_storagelocation_pair.getSecond(), ServiceProvider.getInstance().getMultipartDir(), itemFileName); - response.setETag(md5Checksum); - - MultipartLoadDao uploadDao = new MultipartLoadDao(); - uploadDao.savePart( uploadId, partNumber, md5Checksum, itemFileName, (int)request.getContentLength()); - response.setResultCode(200); - + is = request.getDataInputStream(); + String md5Checksum = bucketAdapter.saveObject(is, host_storagelocation_pair.getSecond(), ServiceProvider.getInstance().getMultipartDir(), itemFileName); + response.setETag(md5Checksum); + MultipartLoadDao uploadDao = new MultipartLoadDao(); + uploadDao.savePart(uploadId, partNumber, md5Checksum, itemFileName,(int) request.getContentLength()); + response.setResultCode(200); + } catch (IOException e) { logger.error("UploadPart failed due to " + e.getMessage(), e); response.setResultCode(500); @@ -637,70 +661,71 @@ public class S3Engine { public S3PutObjectInlineResponse concatentateMultipartUploads(HttpServletResponse httpResp, S3PutObjectInlineRequest request, S3MultipartPart[] parts, OutputStream outputStream) throws IOException { // [A] Set up 
and initial error checking - S3PutObjectInlineResponse response = new S3PutObjectInlineResponse(); - String bucketName = request.getBucketName(); - String key = request.getKey(); - S3MetaDataEntry[] meta = request.getMetaEntries(); - - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); - if (bucket == null) { - logger.error( "completeMultipartUpload( failed since " + bucketName + " does not exist" ); - response.setResultCode(404); - } + S3PutObjectInlineResponse response = new S3PutObjectInlineResponse(); + String bucketName = request.getBucketName(); + String key = request.getKey(); + S3MetaDataEntry[] meta = request.getMetaEntries(); - // [B] Now we need to create the final re-assembled object - // -> the allocObjectItem checks for the bucket policy PutObject permissions - OrderedPair object_objectitem_pair = allocObjectItem(bucket, key, meta, null, request.getCannedAccess()); - OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); - - S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); - String itemFileName = object_objectitem_pair.getSecond().getStoredPath(); - - // -> Amazon defines that we must return a 200 response immediately to the client, but - // -> we don't know the version header until we hit here - httpResp.setStatus(200); - httpResp.setContentType("text/xml; charset=UTF-8"); - String version = object_objectitem_pair.getSecond().getVersion(); - if (null != version) httpResp.addHeader( "x-amz-version-id", version ); - httpResp.flushBuffer(); - + SBucketVO bucket = bucketDao.getByName(bucketName); + if (bucket == null) { + logger.error("completeMultipartUpload( failed since " + bucketName + + " does not exist"); + response.setResultCode(404); + } - // [C] Re-assemble the object from its uploaded file parts - try { - // explicit transaction control to avoid holding transaction during long file concatenation process - PersistContext.commitTransaction(); - - OrderedPair result = bucketAdapter. 
- concatentateObjects - ( host_storagelocation_pair.getSecond(), - bucket.getName(), - itemFileName, - ServiceProvider.getInstance().getMultipartDir(), - parts, - outputStream ); - response.setETag(result.getFirst()); - response.setLastModified(DateHelper.toCalendar( object_objectitem_pair.getSecond().getLastModifiedTime())); - - SObjectItemDao itemDao = new SObjectItemDao(); - SObjectItem item = itemDao.get( object_objectitem_pair.getSecond().getId()); - item.setMd5(result.getFirst()); - item.setStoredSize(result.getSecond().longValue()); - response.setResultCode(200); + // [B] Now we need to create the final re-assembled object + // -> the allocObjectItem checks for the bucket policy PutObject + // permissions + OrderedPair object_objectitem_pair = allocObjectItem( + bucket, key, meta, null, request.getCannedAccess()); + OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); - PersistContext.getSession().save(item); - } - catch (Exception e) { - logger.error("completeMultipartUpload failed due to " + e.getMessage(), e); - } - return response; + S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair + .getFirst()); + String itemFileName = object_objectitem_pair.getSecond() + .getStoredPath(); + + // -> Amazon defines that we must return a 200 response immediately to + // the client, but + // -> we don't know the version header until we hit here + httpResp.setStatus(200); + httpResp.setContentType("text/xml; charset=UTF-8"); + String version = object_objectitem_pair.getSecond().getVersion(); + if (null != version) + httpResp.addHeader("x-amz-version-id", version); + httpResp.flushBuffer(); + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + // [C] Re-assemble the object from its uploaded file parts + try { + // explicit transaction control to avoid holding transaction during + // long file concatenation process + txn.start(); + OrderedPair result = bucketAdapter + .concatentateObjects(host_storagelocation_pair.getSecond(), + bucket.getName(), itemFileName, ServiceProvider + .getInstance().getMultipartDir(), parts, + outputStream); + + response.setETag(result.getFirst()); + response.setLastModified(DateHelper.toCalendar(object_objectitem_pair.getSecond().getLastModifiedTime())); + SObjectItemVO item = itemDao.findById(object_objectitem_pair + .getSecond().getId()); + item.setMd5(result.getFirst()); + item.setStoredSize(result.getSecond().longValue()); + itemDao.update(item.getId(), item); + response.setResultCode(200); + } catch (Exception e) { + logger.error("completeMultipartUpload failed due to " + e.getMessage(),e); + txn.close(); + } + return response; } /** * Return a S3PutObjectInlineResponse which represents an object being created into a bucket * Called from S3ObjectAction when PUTting or POTing an object. */ - + @DB public S3PutObjectInlineResponse handleRequest(S3PutObjectInlineRequest request) { S3PutObjectInlineResponse response = new S3PutObjectInlineResponse(); @@ -710,36 +735,36 @@ public class S3Engine { S3MetaDataEntry[] meta = request.getMetaEntries(); S3AccessControlList acl = request.getAcl(); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); + SBucketVO bucket = bucketDao.getByName(bucketName); if (bucket == null) throw new NoSuchObjectException("Bucket " + bucketName + " does not exist"); // Is the caller allowed to write the object? 
// The allocObjectItem checks for the bucket policy PutObject permissions - OrderedPair object_objectitem_pair = allocObjectItem(bucket, key, meta, acl, request.getCannedAccess()); - OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); + OrderedPair object_objectitem_pair = allocObjectItem(bucket, key, meta, acl, request.getCannedAccess()); + OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); String itemFileName = object_objectitem_pair.getSecond().getStoredPath(); InputStream is = null; - + Transaction txn = null; try { // explicit transaction control to avoid holding transaction during file-copy process - PersistContext.commitTransaction(); + txn = Transaction.open(Transaction.AWSAPI_DB); + txn.start(); is = request.getDataInputStream(); String md5Checksum = bucketAdapter.saveObject(is, host_storagelocation_pair.getSecond(), bucket.getName(), itemFileName); response.setETag(md5Checksum); response.setLastModified(DateHelper.toCalendar( object_objectitem_pair.getSecond().getLastModifiedTime())); response.setVersion( object_objectitem_pair.getSecond().getVersion()); - SObjectItemDao itemDao = new SObjectItemDao(); - SObjectItem item = itemDao.get( object_objectitem_pair.getSecond().getId()); + //SObjectItemDaoImpl itemDao = new SObjectItemDaoImpl(); + SObjectItemVO item = itemDao.findById(object_objectitem_pair.getSecond().getId()); item.setMd5(md5Checksum); item.setStoredSize(contentLength); - PersistContext.getSession().save(item); - + itemDao.update(item.getId(), item); + txn.commit(); } catch (IOException e) { logger.error("PutObjectInline failed due to " + e.getMessage(), e); } catch (OutOfStorageException e) { @@ -752,6 +777,7 @@ public class S3Engine { logger.error("PutObjectInline unable to close stream from data handler.", e); } } + txn.close(); } return response; @@ -771,32 +797,34 @@ public class S3Engine { S3MetaDataEntry[] meta = request.getMetaEntries(); S3AccessControlList acl = request.getAcl(); - SBucketDao bucketDao = new SBucketDao(); - SBucket bucket = bucketDao.getByName(bucketName); + SBucketVO bucket = bucketDao.getByName(bucketName); if(bucket == null) throw new NoSuchObjectException("Bucket " + bucketName + " does not exist"); // Is the caller allowed to write the object? 
// The allocObjectItem checks for the bucket policy PutObject permissions - OrderedPair object_objectitem_pair = allocObjectItem(bucket, key, meta, acl, null); - OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); + OrderedPair object_objectitem_pair = allocObjectItem(bucket, key, meta, acl, null); + OrderedPair host_storagelocation_pair = getBucketStorageHost(bucket); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(host_storagelocation_pair.getFirst()); String itemFileName = object_objectitem_pair.getSecond().getStoredPath(); InputStream is = null; + Transaction txn = null; try { // explicit transaction control to avoid holding transaction during file-copy process - PersistContext.commitTransaction(); + + txn = Transaction.open(Transaction.AWSAPI_DB); + txn.start(); is = request.getInputStream(); String md5Checksum = bucketAdapter.saveObject(is, host_storagelocation_pair.getSecond(), bucket.getName(), itemFileName); response.setETag(md5Checksum); response.setLastModified(DateHelper.toCalendar( object_objectitem_pair.getSecond().getLastModifiedTime())); - SObjectItemDao itemDao = new SObjectItemDao(); - SObjectItem item = itemDao.get( object_objectitem_pair.getSecond().getId()); + SObjectItemVO item = itemDao.findById(object_objectitem_pair.getSecond().getId()); item.setMd5(md5Checksum); item.setStoredSize(contentLength); - PersistContext.getSession().save(item); + itemDao.update(item.getId(), item); + txn.commit(); } catch (OutOfStorageException e) { logger.error("PutObject failed due to " + e.getMessage(), e); @@ -808,6 +836,7 @@ public class S3Engine { logger.error("Unable to close stream from data handler.", e); } } + txn.close(); } return response; @@ -825,18 +854,16 @@ public class S3Engine { // [A] First find the object in the bucket S3Response response = new S3Response(); - SBucketDao bucketDao = new SBucketDao(); String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName( bucketName ); if(sbucket == null) { response.setResultCode(404); response.setResultDescription("Bucket " + bucketName + "does not exist"); return response; } - SObjectDao sobjectDao = new SObjectDao(); String nameKey = request.getKey(); - SObject sobject = sobjectDao.getByNameKey( sbucket, nameKey ); + SObjectVO sobject = objectDao.getByNameKey( sbucket, nameKey ); if(sobject == null) { response.setResultCode(404); response.setResultDescription("Object " + request.getKey() + " in bucket " + bucketName + " does not exist"); @@ -852,7 +879,7 @@ public class S3Engine { // [B] Versioning allow the client to ask for a specific version not just the latest - SObjectItem item = null; + SObjectItemVO item = null; int versioningStatus = sbucket.getVersioningStatus(); String wantVersion = request.getVersion(); if ( SBucket.VERSIONING_ENABLED == versioningStatus && null != wantVersion) @@ -875,7 +902,6 @@ public class S3Engine { verifyAccess( context, "SObjectItem", item.getId(), SAcl.PERMISSION_WRITE_ACL ); // -> the acl always goes on the instance of the object - SAclDao aclDao = new SAclDao(); aclDao.save("SObjectItem", item.getId(), request.getAcl()); response.setResultCode(200); @@ -895,15 +921,15 @@ public class S3Engine { // [A] Does the object exist that holds the ACL we are looking for? 
S3AccessControlPolicy policy = new S3AccessControlPolicy(); - SBucketDao bucketDao = new SBucketDao(); + String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName( bucketName ); if (sbucket == null) throw new NoSuchObjectException("Bucket " + bucketName + " does not exist"); - SObjectDao sobjectDao = new SObjectDao(); + //SObjectDaoImpl sobjectDao = new SObjectDaoImpl(); String nameKey = request.getKey(); - SObject sobject = sobjectDao.getByNameKey( sbucket, nameKey ); + SObjectVO sobject = objectDao.getByNameKey( sbucket, nameKey ); if (sobject == null) throw new NoSuchObjectException("Object " + request.getKey() + " does not exist"); @@ -916,7 +942,7 @@ public class S3Engine { // [B] Versioning allow the client to ask for a specific version not just the latest - SObjectItem item = null; + SObjectItemVO item = null; int versioningStatus = sbucket.getVersioningStatus(); String wantVersion = request.getVersion(); if ( SBucket.VERSIONING_ENABLED == versioningStatus && null != wantVersion) @@ -946,8 +972,8 @@ public class S3Engine { policy.setOwner(owner); policy.setResultCode(200); - SAclDao aclDao = new SAclDao(); - List grants = aclDao.listGrants( "SObjectItem", item.getId()); + + List grants = aclDao.listGrants( "SObjectItem", item.getId()); policy.setGrants(S3Grant.toGrants(grants)); return policy; } @@ -967,18 +993,17 @@ public class S3Engine { int resultCode = 200; // [A] Verify that the bucket and the object exist - SBucketDao bucketDao = new SBucketDao(); + String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName(bucketName); + SBucketVO sbucket = bucketDao.getByName(bucketName); if (sbucket == null) { response.setResultCode(404); response.setResultDescription("Bucket " + request.getBucketName() + " does not exist"); return response; } - SObjectDao objectDao = new SObjectDao(); String nameKey = request.getKey(); - SObject sobject = objectDao.getByNameKey( sbucket, nameKey ); + SObjectVO sobject = objectDao.getByNameKey( sbucket, nameKey ); if (sobject == null) { response.setResultCode(404); response.setResultDescription("Object " + request.getKey() + " does not exist in bucket " + request.getBucketName()); @@ -995,7 +1020,7 @@ public class S3Engine { // [B] Versioning allow the client to ask for a specific version not just the latest - SObjectItem item = null; + SObjectItemVO item = null; int versioningStatus = sbucket.getVersioningStatus(); String wantVersion = request.getVersion(); if ( SBucket.VERSIONING_ENABLED == versioningStatus && null != wantVersion) @@ -1037,15 +1062,15 @@ public class S3Engine { // [D] Return the contents of the object inline // -> extract the meta data that corresponds the specific versioned item - SMetaDao metaDao = new SMetaDao(); - List itemMetaData = metaDao.getByTarget( "SObjectItem", item.getId()); + + List itemMetaData = metaDao.getByTarget( "SObjectItem", item.getId()); if (null != itemMetaData) { int i = 0; S3MetaDataEntry[] metaEntries = new S3MetaDataEntry[ itemMetaData.size() ]; - ListIterator it = itemMetaData.listIterator(); + ListIterator it = itemMetaData.listIterator(); while( it.hasNext()) { - SMeta oneTag = (SMeta)it.next(); + SMetaVO oneTag = (SMetaVO)it.next(); S3MetaDataEntry oneEntry = new S3MetaDataEntry(); oneEntry.setName( oneTag.getName()); oneEntry.setValue( oneTag.getValue()); @@ -1068,7 +1093,7 @@ public class S3Engine { response.setVersion( item.getVersion()); if (request.isInlineData()) { - OrderedPair 
tupleSHostInfo = getBucketStorageHost(sbucket); + OrderedPair tupleSHostInfo = getBucketStorageHost(sbucket); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(tupleSHostInfo.getFirst()); if ( 0 <= bytesStart && 0 <= bytesEnd ) @@ -1091,18 +1116,18 @@ public class S3Engine { { // Verify that the bucket and object exist S3Response response = new S3Response(); - SBucketDao bucketDao = new SBucketDao(); + String bucketName = request.getBucketName(); - SBucket sbucket = bucketDao.getByName( bucketName ); + SBucketVO sbucket = bucketDao.getByName( bucketName ); if (sbucket == null) { response.setResultCode(404); response.setResultDescription("Bucket dosen't existsBucket " + bucketName + " does not exist"); return response; } - SObjectDao objectDao = new SObjectDao(); + String nameKey = request.getKey(); - SObject sobject = objectDao.getByNameKey( sbucket, nameKey ); + SObjectVO sobject = objectDao.getByNameKey( sbucket, nameKey ); if (sobject == null) { response.setResultCode(404); response.setResultDescription("Not FoundNo object with key " + nameKey + " exists in bucket " + bucketName+""); @@ -1112,7 +1137,7 @@ public class S3Engine { // Discover whether versioning is enabled. If so versioning requires the setting of a deletion marker. String storedPath = null; - SObjectItem item = null; + SObjectItemVO item = null; int versioningStatus = sbucket.getVersioningStatus(); if ( SBucket.VERSIONING_ENABLED == versioningStatus ) { @@ -1125,7 +1150,7 @@ public class S3Engine { if (null == wantVersion) { // If versioning is on and no versionId is given then we just write a deletion marker sobject.setDeletionMark( UUID.randomUUID().toString()); - objectDao.update( sobject ); + objectDao.update(sobject.getId(), sobject ); response.setResultDescription("true"+ sobject.getDeletionMark() +""); } else { @@ -1133,7 +1158,7 @@ public class S3Engine { String deletionMarker = sobject.getDeletionMark(); if (null != deletionMarker && wantVersion.equalsIgnoreCase( deletionMarker )) { sobject.setDeletionMark( null ); - objectDao.update( sobject ); + objectDao.update(sobject.getId(), sobject ); response.setResultDescription("" + wantVersion +""); response.setResultDescription("true"+ sobject.getDeletionMark() +""); response.setResultCode(204); @@ -1149,7 +1174,7 @@ public class S3Engine { // Providing versionId is non-null, then just delete the one item that matches the versionId from the database storedPath = item.getStoredPath(); sobject.deleteItem( item.getId()); - objectDao.update( sobject ); + objectDao.update(sobject.getId(), sobject ); response.setResultDescription("" + wantVersion +""); } } @@ -1171,9 +1196,9 @@ public class S3Engine { // Otherwiswe remove the entire object // Cascade-deleting can delete related SObject/SObjectItem objects, but not SAcl and SMeta objects. 
storedPath = item.getStoredPath(); - deleteMetaData( item.getId()); - deleteObjectAcls( "SObjectItem", item.getId()); - objectDao.delete( sobject ); + deleteMetaData( item.getId()); + deleteObjectAcls( "SObjectItem", item.getId()); + objectDao.remove(sobject.getId()); } } } @@ -1181,7 +1206,7 @@ public class S3Engine { // Delete the file holding the object if (null != storedPath) { - OrderedPair host_storagelocation_pair = getBucketStorageHost( sbucket ); + OrderedPair host_storagelocation_pair = getBucketStorageHost( sbucket ); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter( host_storagelocation_pair.getFirst()); bucketAdapter.deleteObject( host_storagelocation_pair.getSecond(), bucketName, storedPath ); } @@ -1192,50 +1217,48 @@ public class S3Engine { private void deleteMetaData( long itemId ) { - SMetaDao metaDao = new SMetaDao(); - List itemMetaData = metaDao.getByTarget( "SObjectItem", itemId ); + List itemMetaData = metaDao.getByTarget( "SObjectItem", itemId ); if (null != itemMetaData) { - ListIterator it = itemMetaData.listIterator(); + ListIterator it = itemMetaData.listIterator(); while( it.hasNext()) { - SMeta oneTag = (SMeta)it.next(); - metaDao.delete( oneTag ); + SMetaVO oneTag = (SMetaVO)it.next(); + metaDao.remove(oneTag.getId()); } } } private void deleteObjectAcls( String target, long itemId ) { - SAclDao aclDao = new SAclDao(); - List itemAclData = aclDao.listGrants( target, itemId ); + List itemAclData = aclDao.listGrants( target, itemId ); if (null != itemAclData) { - ListIterator it = itemAclData.listIterator(); + ListIterator it = itemAclData.listIterator(); while( it.hasNext()) { - SAcl oneTag = (SAcl)it.next(); - aclDao.delete( oneTag ); + SAclVO oneTag = (SAclVO)it.next(); + aclDao.remove(oneTag.getId()); } } } private void deleteBucketAcls( long bucketId ) { - SAclDao aclDao = new SAclDao(); - List bucketAclData = aclDao.listGrants( "SBucket", bucketId ); + + List bucketAclData = aclDao.listGrants( "SBucket", bucketId ); if (null != bucketAclData) { - ListIterator it = bucketAclData.listIterator(); + ListIterator it = bucketAclData.listIterator(); while( it.hasNext()) { - SAcl oneTag = (SAcl)it.next(); - aclDao.delete( oneTag ); + SAclVO oneTag = (SAclVO)it.next(); + aclDao.remove(oneTag.getId()); } } } - private S3ListBucketPrefixEntry[] composeListBucketPrefixEntries(List l, String prefix, String delimiter, int maxKeys) + private S3ListBucketPrefixEntry[] composeListBucketPrefixEntries(List l, String prefix, String delimiter, int maxKeys) { List entries = new ArrayList(); int count = 0; - for(SObject sobject : l) + for(SObjectVO sobject : l) { if(delimiter != null && !delimiter.isEmpty()) { @@ -1264,14 +1287,14 @@ public class S3Engine { * * TODO - how does the versionIdMarker work when there is a deletion marker in the object? 
*/ - private S3ListBucketObjectEntry[] composeListBucketContentEntries(List l, String prefix, String delimiter, int maxKeys, boolean enableVersion, String versionIdMarker) + private S3ListBucketObjectEntry[] composeListBucketContentEntries(List l, String prefix, String delimiter, int maxKeys, boolean enableVersion, String versionIdMarker) { List entries = new ArrayList(); - SObjectItem latest = null; + SObjectItemVO latest = null; boolean hitIdMarker = false; int count = 0; - for( SObject sobject : l ) + for( SObjectVO sobject : l ) { if (delimiter != null && !delimiter.isEmpty()) { @@ -1301,10 +1324,10 @@ public class S3Engine { } else latest = sobject.getLatestVersion( false ); - Iterator it = sobject.getItems().iterator(); + Iterator it = sobject.getItems().iterator(); while( it.hasNext()) { - SObjectItem item = (SObjectItem)it.next(); + SObjectItemVO item = (SObjectItemVO)it.next(); if ( !hitIdMarker ) { @@ -1318,13 +1341,13 @@ public class S3Engine { } else { // -> if there are multiple versions of an object then just return its last version - Iterator it = sobject.getItems().iterator(); - SObjectItem lastestItem = null; + Iterator it = sobject.getItems().iterator(); + SObjectItemVO lastestItem = null; int maxVersion = 0; int version = 0; while(it.hasNext()) { - SObjectItem item = (SObjectItem)it.next(); + SObjectItemVO item = (SObjectItemVO)it.next(); String versionStr = item.getVersion(); if ( null != versionStr ) @@ -1351,7 +1374,7 @@ public class S3Engine { else return null; } - private static S3ListBucketObjectEntry toListEntry( SObject sobject, SObjectItem item, SObjectItem latest ) + private static S3ListBucketObjectEntry toListEntry( SObjectVO sobject, SObjectItemVO item, SObjectItemVO latest ) { S3ListBucketObjectEntry entry = new S3ListBucketObjectEntry(); entry.setKey(sobject.getNameKey()); @@ -1367,22 +1390,21 @@ public class S3Engine { return entry; } - private OrderedPair getBucketStorageHost(SBucket bucket) + private OrderedPair getBucketStorageHost(SBucketVO bucket) { - MHostMountDao mountDao = new MHostMountDao(); - SHost shost = bucket.getShost(); + SHostVO shost = shostDao.findById(bucket.getShostID()); if(shost.getHostType() == SHost.STORAGE_HOST_TYPE_LOCAL) { - return new OrderedPair(shost, shost.getExportRoot()); + return new OrderedPair(shost, shost.getExportRoot()); } - MHostMount mount = mountDao.getHostMount(ServiceProvider.getInstance().getManagementHostId(), shost.getId()); + MHostMountVO mount = mountDao.getHostMount(ServiceProvider.getInstance().getManagementHostId(), shost.getId()); if(mount != null) { - return new OrderedPair(shost, mount.getMountPath()); + return new OrderedPair(shost, mount.getMountPath()); } - + //return null; // need to redirect request to other node - throw new HostNotMountedException("Storage host " + shost.getHost() + " is not locally mounted"); + throw new HostNotMountedException("Storage host "); // + shost.getHost() + " is not locally mounted"); } /** @@ -1393,15 +1415,12 @@ public class S3Engine { */ private void createUploadFolder(String bucketName) { - if (PersistContext.acquireNamedLock("bucket.creation", LOCK_ACQUIRING_TIMEOUT_SECONDS)) - { try { allocBucketStorageHost(bucketName, ServiceProvider.getInstance().getMultipartDir()); - } + } finally { - PersistContext.releaseNamedLock("bucket.creation"); + } - } } /** @@ -1413,40 +1432,39 @@ public class S3Engine { * @param overrideName * @return */ - private OrderedPair allocBucketStorageHost(String bucketName, String overrideName) + private OrderedPair 
allocBucketStorageHost(String bucketName, String overrideName) { - MHostDao mhostDao = new MHostDao(); - SHostDao shostDao = new SHostDao(); + //SHostDao shostDao = new SHostDao(); - MHost mhost = mhostDao.get(ServiceProvider.getInstance().getManagementHostId()); + MHostVO mhost = mhostDao.findById(ServiceProvider.getInstance().getManagementHostId()); if(mhost == null) throw new OutOfServiceException("Temporarily out of service"); if(mhost.getMounts().size() > 0) { Random random = new Random(); - MHostMount[] mounts = (MHostMount[])mhost.getMounts().toArray(); - MHostMount mount = mounts[random.nextInt(mounts.length)]; + MHostMountVO[] mounts = (MHostMountVO[])mhost.getMounts().toArray(); + MHostMountVO mount = mounts[random.nextInt(mounts.length)]; S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(mount.getShost()); bucketAdapter.createContainer(mount.getMountPath(), (null != overrideName ? overrideName : bucketName)); - return new OrderedPair(mount.getShost(), mount.getMountPath()); + return new OrderedPair(mount.getShost(), mount.getMountPath()); } // To make things simple, only allow one local mounted storage root TODO - Change in the future String localStorageRoot = ServiceProvider.getInstance().getStartupProperties().getProperty("storage.root"); if(localStorageRoot != null) { - SHost localSHost = shostDao.getLocalStorageHost(mhost.getId(), localStorageRoot); + SHostVO localSHost = shostDao.getLocalStorageHost(mhost.getId(), localStorageRoot); if(localSHost == null) throw new InternalErrorException("storage.root is configured but not initialized"); S3BucketAdapter bucketAdapter = getStorageHostBucketAdapter(localSHost); bucketAdapter.createContainer(localSHost.getExportRoot(),(null != overrideName ? overrideName : bucketName)); - return new OrderedPair(localSHost, localStorageRoot); + return new OrderedPair(localSHost, localStorageRoot); } throw new OutOfStorageException("No storage host is available"); } - public S3BucketAdapter getStorageHostBucketAdapter(SHost shost) + public S3BucketAdapter getStorageHostBucketAdapter(SHostVO shost) { S3BucketAdapter adapter = bucketAdapters.get(shost.getHostType()); if(adapter == null) @@ -1464,17 +1482,13 @@ public class S3Engine { * @throws IOException */ @SuppressWarnings("deprecation") - public OrderedPair allocObjectItem(SBucket bucket, String nameKey, S3MetaDataEntry[] meta, S3AccessControlList acl, String cannedAccessPolicy) + public OrderedPair allocObjectItem(SBucketVO bucket, String nameKey, S3MetaDataEntry[] meta, S3AccessControlList acl, String cannedAccessPolicy) { - SObjectDao objectDao = new SObjectDao(); - SObjectItemDao objectItemDao = new SObjectItemDao(); - SMetaDao metaDao = new SMetaDao(); - SAclDao aclDao = new SAclDao(); - SObjectItem item = null; + SObjectItemVO item = null; int versionSeq = 1; int versioningStatus = bucket.getVersioningStatus(); - Session session = PersistContext.getSession(); + //Session session = PersistContext.getSession(); // [A] To write into a bucket the user must have write permission to that bucket S3PolicyContext context = new S3PolicyContext( PolicyActions.PutObject, bucket.getName()); @@ -1482,65 +1496,79 @@ public class S3Engine { context.setEvalParam( ConditionKeys.Acl, cannedAccessPolicy); verifyAccess( context, "SBucket", bucket.getId(), SAcl.PERMISSION_WRITE ); // TODO - check this validates plain POSTs + Transaction txn = Transaction.open(Transaction.AWSAPI_DB); + txn.start(); // [B] If versioning is off them we over write a null object item - SObject object = 
objectDao.getByNameKey(bucket, nameKey); + SObjectVO object = objectDao.getByNameKey(bucket, nameKey); if ( object != null ) - { + { // -> if versioning is on create new object items if ( SBucket.VERSIONING_ENABLED == versioningStatus ) { - session.lock(object, LockMode.UPGRADE); + versionSeq = object.getNextSequence(); object.setNextSequence(versionSeq + 1); - session.save(object); + objectDao.update(object.getId(), object); - item = new SObjectItem(); + item = new SObjectItemVO(); item.setTheObject(object); object.getItems().add(item); + item.setsObjectID(object.getId()); item.setVersion(String.valueOf(versionSeq)); Date ts = DateHelper.currentGMTTime(); item.setCreateTime(ts); item.setLastAccessTime(ts); item.setLastModifiedTime(ts); - session.save(item); + item = itemDao.persist(item); + txn.commit(); + //session.save(item); } else { // -> find an object item with a null version, can be null // if bucket started out with versioning enabled and was then suspended - item = objectItemDao.getByObjectIdNullVersion( object.getId()); + item = itemDao.getByObjectIdNullVersion( object.getId()); if (item == null) { - item = new SObjectItem(); + item = new SObjectItemVO(); item.setTheObject(object); + item.setsObjectID(object.getId()); object.getItems().add(item); Date ts = DateHelper.currentGMTTime(); item.setCreateTime(ts); item.setLastAccessTime(ts); item.setLastModifiedTime(ts); - session.save(item); + item = itemDao.persist(item); + txn.commit(); } } } else - { // -> there is no object nor an object item - object = new SObject(); - object.setBucket(bucket); - object.setNameKey(nameKey); - object.setNextSequence(2); - object.setCreateTime(DateHelper.currentGMTTime()); - object.setOwnerCanonicalId(UserContext.current().getCanonicalUserId()); - session.save(object); - - item = new SObjectItem(); - item.setTheObject(object); - object.getItems().add(item); - if (SBucket.VERSIONING_ENABLED == versioningStatus) item.setVersion(String.valueOf(versionSeq)); - Date ts = DateHelper.currentGMTTime(); - item.setCreateTime(ts); - item.setLastAccessTime(ts); - item.setLastModifiedTime(ts); - session.save(item); + { + Transaction txn1 = Transaction.open(Transaction.AWSAPI_DB); + txn1.start(); + // -> there is no object nor an object item + object = new SObjectVO(); + object.setBucket(bucket); + object.setNameKey(nameKey); + object.setNextSequence(2); + object.setBucketID(bucket.getId()); + object.setCreateTime(DateHelper.currentGMTTime()); + object.setOwnerCanonicalId(UserContext.current().getCanonicalUserId()); + object = objectDao.persist(object); + item = new SObjectItemVO(); + item.setTheObject(object); + item.setsObjectID(object.getId()); + object.getItems().add(item); + if (SBucket.VERSIONING_ENABLED == versioningStatus) item.setVersion(String.valueOf(versionSeq)); + Date ts = DateHelper.currentGMTTime(); + item.setCreateTime(ts); + item.setLastAccessTime(ts); + item.setLastModifiedTime(ts); + item = itemDao.persist(item); + txn.commit(); + txn.close(); + } @@ -1570,8 +1598,9 @@ public class S3Engine { aclDao.save( "SObjectItem", item.getId(), acl ); } - session.update(item); - return new OrderedPair(object, item); + itemDao.update(item.getId(), item); + txn.close(); + return new OrderedPair(object, item); } @@ -1579,11 +1608,11 @@ public class S3Engine { * Access controls that are specified via the "x-amz-acl:" headers in REST requests. 
* Note that canned policies can be set when the object's contents are set */ - public void setCannedAccessControls( String cannedAccessPolicy, String target, long objectId, SBucket bucket ) + public void setCannedAccessControls( String cannedAccessPolicy, String target, long objectId, SBucketVO bucket ) { // Find the permission and symbol for the principal corresponding to the requested cannedAccessPolicy Triple permission_permission_symbol_triple = - SAcl.getCannedAccessControls(cannedAccessPolicy, target, bucket.getOwnerCanonicalId()); + SAclVO.getCannedAccessControls(cannedAccessPolicy, target, bucket.getOwnerCanonicalId()); if ( null == permission_permission_symbol_triple.getThird() ) setSingleAcl(target, objectId, permission_permission_symbol_triple.getFirst()); else @@ -1599,7 +1628,6 @@ public class S3Engine { private void setSingleAcl( String target, long targetId, int permission ) { - SAclDao aclDao = new SAclDao(); S3AccessControlList defaultAcl = new S3AccessControlList(); // -> if an annoymous request, then do not rewrite the ACL @@ -1626,7 +1654,6 @@ public class S3Engine { */ private void setDefaultAcls( String target, long objectId, int permission1, int permission2, String owner ) { - SAclDao aclDao = new SAclDao(); S3AccessControlList defaultAcl = new S3AccessControlList(); // -> object owner @@ -1712,20 +1739,18 @@ public class S3Engine { { if (SAcl.PERMISSION_PASS == requestedPermission) return; - SAclDao aclDao = new SAclDao(); - // If an annoymous request, then canonicalUserId is an empty string String userId = UserContext.current().getCanonicalUserId(); if ( 0 == userId.length()) { // Is an anonymous principal ACL set for this ? - if (hasPermission( aclDao.listGrants( target, targetId, "A" ), requestedPermission )) return; + if (hasPermission( saclDao.listGrants( target, targetId, "A" ), requestedPermission )) return; } else { - if (hasPermission( aclDao.listGrants( target, targetId, userId ), requestedPermission )) return; + if (hasPermission( saclDao.listGrants( target, targetId, userId ), requestedPermission )) return; // Or alternatively is there is any principal authenticated ACL set for this ? - if (hasPermission( aclDao.listGrants( target, targetId, "*" ), requestedPermission )) return; + if (hasPermission( saclDao.listGrants( target, targetId, "*" ), requestedPermission )) return; } // No privileges implies that no access is allowed in the case of an anonymous user throw new PermissionDeniedException( "Access Denied - ACLs do not give user the required permission" ); @@ -1749,8 +1774,11 @@ public class S3Engine { // -> do we have to load it from the database (any other value means there is no policy)? 
if (-1 == result.getSecond().intValue()) { - BucketPolicyDao policyDao = new BucketPolicyDao(); - String policyInJson = policyDao.getPolicy( context.getBucketName()); + BucketPolicyVO policyvo = bPolicy.getByName(context.getBucketName()); + String policyInJson = null; + if (null != policyvo) + policyInJson = policyvo.getPolicy(); + // -> place in cache that no policy exists in the database if (null == policyInJson) { ServiceProvider.getInstance().setBucketPolicy(context.getBucketName(), null); @@ -1835,13 +1863,13 @@ public class S3Engine { } } - private static boolean hasPermission( List privileges, int requestedPermission ) + private static boolean hasPermission( List privileges, int requestedPermission ) { - ListIterator it = privileges.listIterator(); + ListIterator it = privileges.listIterator(); while( it.hasNext()) { // True providing the requested permission is contained in one or the granted rights for this user. False otherwise. - SAcl rights = (SAcl)it.next(); + SAclVO rights = (SAclVO)it.next(); int permission = rights.getPermission(); if (requestedPermission == (permission & requestedPermission)) return true; } diff --git a/awsapi/src/com/cloud/bridge/service/core/s3/S3Grant.java b/awsapi/src/com/cloud/bridge/service/core/s3/S3Grant.java index b3c07beb16a..28e30e25c05 100644 --- a/awsapi/src/com/cloud/bridge/service/core/s3/S3Grant.java +++ b/awsapi/src/com/cloud/bridge/service/core/s3/S3Grant.java @@ -19,6 +19,7 @@ package com.cloud.bridge.service.core.s3; import java.util.List; import com.cloud.bridge.model.SAcl; +import com.cloud.bridge.model.SAclVO; import com.cloud.bridge.model.SBucket; import com.cloud.bridge.service.exception.UnsupportedException; @@ -64,12 +65,12 @@ public class S3Grant { /* Return an array of S3Grants holding the permissions of grantees by grantee type and their canonicalUserIds. * Used by S3 engine to get ACL policy requests for buckets and objects. */ - public static S3Grant[] toGrants(List grants) { + public static S3Grant[] toGrants(List grants) { if(grants != null) { S3Grant[] entries = new S3Grant[grants.size()]; int i = 0; - for(SAcl acl: grants) { + for(SAclVO acl: grants) { entries[i] = new S3Grant(); entries[i].setGrantee(acl.getGranteeType()); entries[i].setCanonicalUserID(acl.getGranteeCanonicalId()); diff --git a/awsapi/src/com/cloud/bridge/util/CloudSessionFactory.java b/awsapi/src/com/cloud/bridge/util/CloudSessionFactory.java deleted file mode 100644 index 404f4636a8b..00000000000 --- a/awsapi/src/com/cloud/bridge/util/CloudSessionFactory.java +++ /dev/null @@ -1,106 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-package com.cloud.bridge.util; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.Properties; - -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.cfg.Configuration; -import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; -import org.jasypt.properties.EncryptableProperties; -import org.apache.log4j.Logger; - -public class CloudSessionFactory { - private static CloudSessionFactory instance; - public static final Logger logger = Logger.getLogger(CloudSessionFactory.class); - - private SessionFactory factory; - - private CloudSessionFactory() { - Configuration cfg = new Configuration(); - File file = ConfigurationHelper.findConfigurationFile("hibernate.cfg.xml"); - - File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties"); - Properties dbProp = null; - String dbName = null; - String dbHost = null; - String dbUser = null; - String dbPassword = null; - String dbPort = null; - - if (null != propertiesFile) { - - if(EncryptionSecretKeyCheckerUtil.useEncryption()){ - StandardPBEStringEncryptor encryptor = EncryptionSecretKeyCheckerUtil.getEncryptor(); - dbProp = new EncryptableProperties(encryptor); - } else { - dbProp = new Properties(); - } - - try { - dbProp.load( new FileInputStream( propertiesFile )); - } catch (FileNotFoundException e) { - logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e); - } catch (IOException e) { - logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e); - } - } - - - // - // we are packaging hibernate mapping files along with the class files, - // make sure class loader use the same class path when initializing hibernate mapping. - // This is important when we are deploying and testing at different environment (Tomcat/JUnit test runner) - // - if(file != null && dbProp != null){ - Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); - cfg.configure(file); - - dbHost = dbProp.getProperty( "db.cloud.host" ); - dbName = dbProp.getProperty( "db.awsapi.name" ); - dbUser = dbProp.getProperty( "db.cloud.username" ); - dbPassword = dbProp.getProperty( "db.cloud.password" ); - dbPort = dbProp.getProperty( "db.cloud.port" ); - - cfg.setProperty("hibernate.connection.url", "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName); - cfg.setProperty("hibernate.connection.username", dbUser); - cfg.setProperty("hibernate.connection.password", dbPassword); - - - factory = cfg.buildSessionFactory(); - }else{ - logger.warn("Unable to open load db configuration"); - throw new RuntimeException("nable to open load db configuration"); - } - } - - public synchronized static CloudSessionFactory getInstance() { - if(instance == null) { - instance = new CloudSessionFactory(); - } - return instance; - } - - public Session openSession() { - return factory.openSession(); - } -} diff --git a/awsapi/src/com/cloud/bridge/util/CloudStackSessionFactory.java b/awsapi/src/com/cloud/bridge/util/CloudStackSessionFactory.java deleted file mode 100644 index ac5e2d5d71b..00000000000 --- a/awsapi/src/com/cloud/bridge/util/CloudStackSessionFactory.java +++ /dev/null @@ -1,106 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. -package com.cloud.bridge.util; - -import java.io.File; -import java.io.FileInputStream; -import java.io.FileNotFoundException; -import java.io.IOException; -import java.util.Properties; - -import org.hibernate.Session; -import org.hibernate.SessionFactory; -import org.hibernate.cfg.Configuration; -import org.jasypt.encryption.pbe.StandardPBEStringEncryptor; -import org.jasypt.properties.EncryptableProperties; -import org.apache.log4j.Logger; - -public class CloudStackSessionFactory { - private static CloudStackSessionFactory instance; - public static final Logger logger = Logger.getLogger(CloudStackSessionFactory.class); - - private SessionFactory factory; - - private CloudStackSessionFactory() { - Configuration cfg = new Configuration(); - File file = ConfigurationHelper.findConfigurationFile("CloudStack.cfg.xml"); - - File propertiesFile = ConfigurationHelper.findConfigurationFile("db.properties"); - Properties dbProp = null; - String dbName = null; - String dbHost = null; - String dbUser = null; - String dbPassword = null; - String dbPort = null; - - if (null != propertiesFile) { - - if(EncryptionSecretKeyCheckerUtil.useEncryption()){ - StandardPBEStringEncryptor encryptor = EncryptionSecretKeyCheckerUtil.getEncryptor(); - dbProp = new EncryptableProperties(encryptor); - } else { - dbProp = new Properties(); - } - - try { - dbProp.load( new FileInputStream( propertiesFile )); - } catch (FileNotFoundException e) { - logger.warn("Unable to open properties file: " + propertiesFile.getAbsolutePath(), e); - } catch (IOException e) { - logger.warn("Unable to read properties file: " + propertiesFile.getAbsolutePath(), e); - } - } - - - // - // we are packaging hibernate mapping files along with the class files, - // make sure class loader use the same class path when initializing hibernate mapping. 
- // This is important when we are deploying and testing at different environment (Tomcat/JUnit test runner) - // - if(file != null && dbProp != null){ - Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); - cfg.configure(file); - - dbHost = dbProp.getProperty( "db.cloud.host" ); - dbName = dbProp.getProperty( "db.cloud.name" ); - dbUser = dbProp.getProperty( "db.cloud.username" ); - dbPassword = dbProp.getProperty( "db.cloud.password" ); - dbPort = dbProp.getProperty( "db.cloud.port" ); - - cfg.setProperty("hibernate.connection.url", "jdbc:mysql://" + dbHost + ":" + dbPort + "/" + dbName); - cfg.setProperty("hibernate.connection.username", dbUser); - cfg.setProperty("hibernate.connection.password", dbPassword); - - - factory = cfg.buildSessionFactory(); - }else{ - logger.warn("Unable to open load db configuration"); - throw new RuntimeException("nable to open load db configuration"); - } - } - - public synchronized static CloudStackSessionFactory getInstance() { - if(instance == null) { - instance = new CloudStackSessionFactory(); - } - return instance; - } - - public Session openSession() { - return factory.openSession(); - } -} diff --git a/awsapi/src/com/cloud/bridge/util/QueryHelper.java b/awsapi/src/com/cloud/bridge/util/QueryHelper.java deleted file mode 100644 index 1a1b58290e1..00000000000 --- a/awsapi/src/com/cloud/bridge/util/QueryHelper.java +++ /dev/null @@ -1,85 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
-package com.cloud.bridge.util; - -import java.io.Serializable; -import java.math.BigDecimal; -import java.math.BigInteger; -import java.util.Calendar; -import java.util.Date; -import java.util.List; -import java.util.Locale; - -import org.hibernate.Query; - -public class QueryHelper { - public static void bindParameters(Query query, Object[] params) { - int pos = 0; - if(params != null && params.length > 0) { - for(Object param : params) { - if(param instanceof Byte) - query.setByte(pos++, ((Byte)param).byteValue()); - else if(param instanceof Short) - query.setShort(pos++, ((Short)param).shortValue()); - else if(param instanceof Integer) - query.setInteger(pos++, ((Integer)param).intValue()); - else if(param instanceof Long) - query.setLong(pos++, ((Long)param).longValue()); - else if(param instanceof Float) - query.setFloat(pos++, ((Float)param).floatValue()); - else if(param instanceof Double) - query.setDouble(pos++, ((Double)param).doubleValue()); - else if(param instanceof Boolean) - query.setBoolean(pos++, ((Boolean)param).booleanValue()); - else if(param instanceof Character) - query.setCharacter(pos++, ((Character)param).charValue()); - else if(param instanceof Date) - query.setDate(pos++, (Date)param); - else if(param instanceof Calendar) - query.setCalendar(pos++, (Calendar)param); - else if(param instanceof CalendarDateParam) - query.setCalendarDate(pos++, ((CalendarDateParam)param).dateValue()); - else if(param instanceof TimestampParam) - query.setTimestamp(pos++, ((TimestampParam)param).timestampValue()); - else if(param instanceof TimeParam) - query.setTime(pos++, ((TimeParam)param).timeValue()); - else if(param instanceof String) - query.setString(pos++, (String)param); - else if(param instanceof TextParam) - query.setText(pos++, ((TextParam)param).textValue()); - else if(param instanceof byte[]) - query.setBinary(pos++, (byte[])param); - else if(param instanceof BigDecimal) - query.setBigDecimal(pos++, (BigDecimal)param); - else if(param instanceof BigInteger) - query.setBigInteger(pos++, (BigInteger)param); - else if(param instanceof Locale) - query.setLocale(pos++, (Locale)param); - else if(param instanceof EntityParam) - query.setEntity(pos++, ((EntityParam)param).entityValue()); - else if(param instanceof Serializable) - query.setSerializable(pos++, (Serializable)param); - else - query.setEntity(pos++, param); - } - } - } - - public static List executeQuery(Query query) { - return (List)query.list(); - } -} diff --git a/awsapi/src/com/cloud/stack/models/CloudStackAccount.hbm.xml b/awsapi/src/com/cloud/stack/models/CloudStackAccount.hbm.xml deleted file mode 100644 index 799679798d8..00000000000 --- a/awsapi/src/com/cloud/stack/models/CloudStackAccount.hbm.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/stack/models/CloudStackConfiguration.hbm.xml b/awsapi/src/com/cloud/stack/models/CloudStackConfiguration.hbm.xml deleted file mode 100644 index 134e6f10a3e..00000000000 --- a/awsapi/src/com/cloud/stack/models/CloudStackConfiguration.hbm.xml +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - - - - - - - - - - - - - - - diff --git a/awsapi/src/com/cloud/stack/models/CloudStackServiceOffering.hbm.xml b/awsapi/src/com/cloud/stack/models/CloudStackServiceOffering.hbm.xml deleted file mode 100644 index 2bbcfd16dc6..00000000000 --- a/awsapi/src/com/cloud/stack/models/CloudStackServiceOffering.hbm.xml +++ /dev/null @@ -1,34 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/build/build-aws-api.xml b/build/build-aws-api.xml 
index 04db51626f0..9ab267e6170 100644 --- a/build/build-aws-api.xml +++ b/build/build-aws-api.xml @@ -223,6 +223,7 @@ + diff --git a/client/tomcatconf/components.xml.in b/client/tomcatconf/components.xml.in index beaac13d922..3c4b9fd5b16 100755 --- a/client/tomcatconf/components.xml.in +++ b/client/tomcatconf/components.xml.in @@ -226,4 +226,31 @@ under the License. - + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/deps/awsapi-lib/cloud-cglib.jar b/deps/awsapi-lib/cloud-cglib.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/deps/awsapi-lib/cloud-commons-dbcp-1.4.jar b/deps/awsapi-lib/cloud-commons-dbcp-1.4.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/deps/awsapi-lib/cloud-commons-pool-1.5.6.jar b/deps/awsapi-lib/cloud-commons-pool-1.5.6.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/deps/awsapi-lib/cloud-ehcache.jar b/deps/awsapi-lib/cloud-ehcache.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/deps/awsapi-lib/cloud-javax.persistence-2.0.0.jar b/deps/awsapi-lib/cloud-javax.persistence-2.0.0.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/deps/awsapi-lib/cloud-utils.jar b/deps/awsapi-lib/cloud-utils.jar new file mode 100644 index 00000000000..e69de29bb2d diff --git a/utils/src/com/cloud/utils/db/Transaction.java b/utils/src/com/cloud/utils/db/Transaction.java index fc49bb02580..755de8b55df 100755 --- a/utils/src/com/cloud/utils/db/Transaction.java +++ b/utils/src/com/cloud/utils/db/Transaction.java @@ -79,6 +79,7 @@ public class Transaction { public static final short CLOUD_DB = 0; public static final short USAGE_DB = 1; + public static final short AWSAPI_DB = 2; public static final short CONNECTED_DB = -1; private static AtomicLong s_id = new AtomicLong(); @@ -223,7 +224,18 @@ public class Transaction { return null; } } - + public static Connection getStandaloneAwsapiConnection() { + try { + Connection conn = s_awsapiDS.getConnection(); + if (s_connLogger.isTraceEnabled()) { + s_connLogger.trace("Retrieving a standalone connection for usage: dbconn" + System.identityHashCode(conn)); + } + return conn; + } catch (SQLException e) { + s_logger.warn("Unexpected exception: ", e); + return null; + } +} protected void attach(TransactionAttachment value) { _stack.push(new StackElement(ATTACHMENT, value)); } @@ -525,8 +537,18 @@ public class Transaction { throw new CloudRuntimeException("Database is not initialized, process is dying?"); } break; + case AWSAPI_DB: + if(s_awsapiDS != null) { + _conn = s_awsapiDS.getConnection(); + } else { + s_logger.warn("A static-initialized variable becomes null, process is dying?"); + throw new CloudRuntimeException("Database is not initialized, process is dying?"); + } + break; + default: - throw new CloudRuntimeException("No database selected for the transaction"); + + throw new CloudRuntimeException("No database selected for the transaction"); } _conn.setAutoCommit(!_txn); @@ -953,6 +975,7 @@ public class Transaction { private static DataSource s_ds; private static DataSource s_usageDS; + private static DataSource s_awsapiDS; static { try { final File dbPropsFile = PropertiesUtil.findConfigFile("db.properties"); @@ -1035,6 +1058,17 @@ public class Transaction { final PoolableConnectionFactory usagePoolableConnectionFactory = new PoolableConnectionFactory(usageConnectionFactory, usageConnectionPool, new StackKeyedObjectPoolFactory(), null, false, false); s_usageDS = new 
PoolingDataSource(usagePoolableConnectionFactory.getPool()); + + //configure awsapi db + final String awsapiDbName = dbProps.getProperty("db.awsapi.name"); + final GenericObjectPool awsapiConnectionPool = new GenericObjectPool(null, usageMaxActive, GenericObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION, + usageMaxWait, usageMaxIdle); + final ConnectionFactory awsapiConnectionFactory = new DriverManagerConnectionFactory("jdbc:mysql://"+cloudHost + ":" + cloudPort + "/" + awsapiDbName + + "?autoReconnect="+usageAutoReconnect, cloudUsername, cloudPassword); + final PoolableConnectionFactory awsapiPoolableConnectionFactory = new PoolableConnectionFactory(awsapiConnectionFactory, awsapiConnectionPool, + new StackKeyedObjectPoolFactory(), null, false, false); + s_awsapiDS = new PoolingDataSource(awsapiPoolableConnectionFactory.getPool()); + } catch (final Exception e) { final GenericObjectPool connectionPool = new GenericObjectPool(null, 5); final ConnectionFactory connectionFactory = new DriverManagerConnectionFactory("jdbc:mysql://localhost:3306/cloud", "cloud", "cloud");