hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From e...@apache.org
Subject [3/3] hadoop git commit: HDDS-657. Remove {volume} path segments from all the remaining rest endpoints. Contributed by Elek, Marton.
Date Tue, 16 Oct 2018 15:41:45 GMT
HDDS-657. Remove {volume} path segments from all the remaining rest endpoints. Contributed by Elek, Marton.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/0c2914e5
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/0c2914e5
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/0c2914e5

Branch: refs/heads/trunk
Commit: 0c2914e582587c066db1b2c1530e57d5c078577a
Parents: 0bf8a11
Author: Márton Elek <elek@apache.org>
Authored: Tue Oct 16 15:14:05 2018 +0200
Committer: Márton Elek <elek@apache.org>
Committed: Tue Oct 16 17:30:53 2018 +0200

----------------------------------------------------------------------
 .../dist/src/main/smoketest/s3/README.md        |  27 +++
 .../dist/src/main/smoketest/s3/__init__.robot   |  21 ++
 .../dist/src/main/smoketest/s3/awscli.robot     |  63 ------
 .../dist/src/main/smoketest/s3/awss3.robot      |  47 ++++
 .../dist/src/main/smoketest/s3/buckethead.robot |  34 +++
 .../dist/src/main/smoketest/s3/bucketlist.robot |  32 +++
 .../dist/src/main/smoketest/s3/bucketv2.robot   |  66 ------
 .../dist/src/main/smoketest/s3/bucketv4.robot   |  71 ------
 .../src/main/smoketest/s3/commonawslib.robot    |  43 +++-
 .../src/main/smoketest/s3/objectdelete.robot    |  72 ++++++
 .../src/main/smoketest/s3/objectputget.robot    |  42 ++++
 hadoop-ozone/dist/src/main/smoketest/test.sh    |   5 +-
 .../apache/hadoop/ozone/s3/EndpointBase.java    | 158 -------------
 .../hadoop/ozone/s3/bucket/DeleteBucket.java    |  67 ------
 .../hadoop/ozone/s3/bucket/HeadBucket.java      |  61 -----
 .../hadoop/ozone/s3/bucket/ListBucket.java      |  74 -------
 .../ozone/s3/bucket/ListBucketResponse.java     |  55 -----
 .../hadoop/ozone/s3/bucket/PutBucket.java       |  79 -------
 .../hadoop/ozone/s3/bucket/package-info.java    |  30 ---
 .../ozone/s3/commontypes/IsoDateAdapter.java    |   2 +-
 .../ozone/s3/endpoint/BucketEndpoint.java       | 199 +++++++++++++++++
 .../hadoop/ozone/s3/endpoint/EndpointBase.java  | 213 ++++++++++++++++++
 .../ozone/s3/endpoint/ListBucketResponse.java   |  55 +++++
 .../ozone/s3/endpoint/ListObjectResponse.java   | 158 +++++++++++++
 .../ozone/s3/endpoint/ObjectEndpoint.java       | 222 +++++++++++++++++++
 .../hadoop/ozone/s3/endpoint/RootEndpoint.java  |  82 +++++++
 .../hadoop/ozone/s3/endpoint/package-info.java  |  30 +++
 .../hadoop/ozone/s3/object/DeleteObject.java    |  51 -----
 .../hadoop/ozone/s3/object/HeadObject.java      |  74 -------
 .../hadoop/ozone/s3/object/ListObject.java      | 119 ----------
 .../ozone/s3/object/ListObjectResponse.java     | 147 ------------
 .../hadoop/ozone/s3/object/PutObject.java       |  92 --------
 .../hadoop/ozone/s3/object/package-info.java    |  29 ---
 .../hadoop/ozone/client/OzoneVolumeStub.java    |   3 +-
 .../ozone/s3/bucket/TestBucketResponse.java     |  40 ----
 .../ozone/s3/bucket/TestDeleteBucket.java       |  99 ---------
 .../hadoop/ozone/s3/bucket/TestGetBucket.java   | 114 ----------
 .../hadoop/ozone/s3/bucket/TestHeadBucket.java  |  85 -------
 .../hadoop/ozone/s3/bucket/TestListBucket.java  |  97 --------
 .../hadoop/ozone/s3/bucket/package-info.java    |  21 --
 .../ozone/s3/endpoint/TestBucketDelete.java     | 100 +++++++++
 .../hadoop/ozone/s3/endpoint/TestBucketGet.java | 115 ++++++++++
 .../ozone/s3/endpoint/TestBucketHead.java       |  71 ++++++
 .../ozone/s3/endpoint/TestBucketResponse.java   |  38 ++++
 .../ozone/s3/endpoint/TestObjectDelete.java     |  60 +++++
 .../hadoop/ozone/s3/endpoint/TestObjectGet.java |  80 +++++++
 .../ozone/s3/endpoint/TestObjectHead.java       |  96 ++++++++
 .../hadoop/ozone/s3/endpoint/TestPutObject.java |  91 ++++++++
 .../hadoop/ozone/s3/endpoint/TestRootList.java  |  79 +++++++
 .../hadoop/ozone/s3/endpoint/package-info.java  |  21 ++
 .../ozone/s3/object/TestDeleteObject.java       |  56 -----
 .../hadoop/ozone/s3/object/TestHeadObject.java  |  95 --------
 .../hadoop/ozone/s3/object/TestPutObject.java   |  89 --------
 .../hadoop/ozone/s3/object/package-info.java    |  21 --
 54 files changed, 2032 insertions(+), 1959 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/README.md
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/README.md b/hadoop-ozone/dist/src/main/smoketest/s3/README.md
new file mode 100644
index 0000000..884ba2e
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/README.md
@@ -0,0 +1,27 @@
+<!---
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+   http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License. See accompanying LICENSE file.
+-->
+
+## Ozone S3 Gateway Acceptance Tests
+
+Note: the aws cli based acceptance tests can be cross-checked with the original AWS s3 endpoint.
+
+You need to
+
+  1. Create a bucket
+  2. Configure your local aws cli
+  3. Set bucket/endpointurl during the robot test execution
+
+```
+robot -v bucket:ozonetest -v OZONE_S3_SET_CREDENTIALS:false -v ENDPOINT_URL:https://s3.us-east-2.amazonaws.com smoketest/s3
+```

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot b/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot
new file mode 100644
index 0000000..f1bbea9
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/__init__.robot
@@ -0,0 +1,21 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            ./commonawslib.robot
+Test Setup          Setup s3 tests
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot b/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot
deleted file mode 100644
index b26ad91..0000000
--- a/hadoop-ozone/dist/src/main/smoketest/s3/awscli.robot
+++ /dev/null
@@ -1,63 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-*** Settings ***
-Documentation       S3 gateway test with aws cli
-Library             OperatingSystem
-Library             String
-Resource            ../commonlib.robot
-
-*** Variables ***
-${ENDPOINT_URL}       http://s3g:9878
-
-*** Keywords ***
-Execute AWSCli
-    [Arguments]       ${command}
-    ${output} =       Execute          aws s3 --endpoint-url ${ENDPOINT_URL}/${VOLUME} ${command}
-    [return]          ${output}
-
-*** Test Cases ***
-
-Create volume and bucket for the tests
-    ${postfix} =        Generate Random String  5  [NUMBERS]
-    Set Suite Variable  ${BUCKET}                  bucket-${postfix}
-    Set Suite Variable  ${VOLUME}                  vol-${postfix}
-                        Log                        Testing s3 commands in /${VOLUME}/${BUCKET}
-    ${result} =         Execute                    ozone sh volume create /${VOLUME} --user hadoop
-    ${result} =         Execute                    ozone sh bucket create /${VOLUME}/${BUCKET}
-
-Install aws s3 cli
-                        Execute                    sudo apt-get install -y awscli
-                        Set Environment Variable   AWS_ACCESS_KEY_ID       ANYID
-                        Set Environment Variable   AWS_SECRET_ACCESS_KEY   ANYKEY
-
-File upload and directory list
-                        Execute                   date > /tmp/testfile
-    ${result} =         Execute AWSCli            cp /tmp/testfile s3://${BUCKET}
-                        Should contain            ${result}         upload
-    ${result} =         Execute AWSCli            cp /tmp/testfile s3://${BUCKET}/dir1/dir2/file
-                        Should contain            ${result}         upload
-    ${result} =         Execute AWSCli            ls s3://${BUCKET}
-                        Should contain            ${result}         testfile
-                        Should contain            ${result}         dir1
-                        Should not contain        ${result}         dir2
-    ${result} =         Execute AWSCli            ls s3://${BUCKET}/dir1/
-                        Should not contain        ${result}         testfile
-                        Should not contain        ${result}         dir1
-                        Should contain            ${result}         dir2
-    ${result} =         Execute AWSCli            ls s3://${BUCKET}/dir1/dir2/
-                        Should not contain        ${result}         testfile
-                        Should not contain        ${result}         dir1
-                        Should contain            ${result}         file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot
new file mode 100644
index 0000000..79db688
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/awss3.robot
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       S3 gateway test with aws cli
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            ./commonawslib.robot
+Suite Setup         Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL}       http://s3g:9878
+${BUCKET}             generated
+
+*** Test Cases ***
+
+File upload and directory list
+                        Execute                   date > /tmp/testfile
+    ${result} =         Execute AWSS3Cli          cp /tmp/testfile s3://${BUCKET}
+                        Should contain            ${result}         upload
+    ${result} =         Execute AWSS3Cli          cp /tmp/testfile s3://${BUCKET}/dir1/dir2/file
+                        Should contain            ${result}         upload
+    ${result} =         Execute AWSS3Cli          ls s3://${BUCKET}
+                        Should contain            ${result}         testfile
+                        Should contain            ${result}         dir1
+                        Should not contain        ${result}         dir2
+    ${result} =         Execute AWSS3Cli          ls s3://${BUCKET}/dir1/
+                        Should not contain        ${result}         testfile
+                        Should not contain        ${result}         dir1
+                        Should contain            ${result}         dir2
+    ${result} =         Execute AWSS3Cli          ls s3://${BUCKET}/dir1/dir2/
+                        Should not contain        ${result}         testfile
+                        Should not contain        ${result}         dir1
+                        Should contain            ${result}         file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot
new file mode 100644
index 0000000..2ce5002
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/buckethead.robot
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       S3 gateway test with aws cli
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            commonawslib.robot
+Test Setup          Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL}       http://s3g:9878
+${BUCKET}             generated
+
+*** Test Cases ***
+
+Head Bucket not existent
+    ${result} =         Execute AWSS3APICli     head-bucket --bucket ${BUCKET}
+    ${result} =         Execute AWSS3APICli and checkrc      head-bucket --bucket ozonenosuchbucketqqweqwe  255
+                        Should contain          ${result}    Bad Request
+                        Should contain          ${result}    400

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot
new file mode 100644
index 0000000..4fe9b65
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/bucketlist.robot
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       S3 gateway test with aws cli
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            commonawslib.robot
+Test Setup          Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL}       http://s3g:9878
+${BUCKET}             generated
+
+*** Test Cases ***
+
+List buckets
+    ${result} =         Execute AWSS3APICli     list-buckets | jq -r '.Buckets[].Name'
+                        Should contain          ${result}    ${BUCKET}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot
deleted file mode 100644
index f17189b..0000000
--- a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv2.robot
+++ /dev/null
@@ -1,66 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-*** Settings ***
-Documentation       S3 gateway test with aws cli for bucket operations
-Library             String
-Library             OperatingSystem
-Resource            commonawslib.robot
-
-*** Variables ***
-${ENDPOINT_URL}       http://s3g:9878
-${OZONE_TEST}         true
-${BUCKET}             generated
-${NONEXIST-BUCKET}    generated1
-*** Keywords ***
-
-Install aws s3 cli
-                        Execute                    sudo apt-get install -y awscli
-                        Set Environment Variable   AWS_ACCESS_KEY_ID       default
-                        Set Environment Variable   AWS_SECRET_ACCESS_KEY   defaultsecret
-    ${postfix1} =       Generate Random String  5  [NUMBERS]
-    Set Suite Variable  ${BUCKET}                  bucket-${postfix1}
-
-Check Volume
-    # as we know bucket to volume map. Volume name  bucket mapped is s3 + AWS_ACCESS_KEY_ID
-    ${result} =         Execute                     ozone sh volume info /s3default
-                        Should contain              ${result}         s3default
-                        Should not contain          ${result}         VOLUME_NOT_FOUND
-
-*** Test Cases ***
-
-Setup s3 Tests
-    Run Keyword if    '${OZONE_TEST}' == 'true'    Install aws s3 cli
-
-Create Bucket
-    ${result} =         Execute AWSS3APICli         create-bucket --bucket ${BUCKET}
-                        Should contain              ${result}         ${BUCKET}
-                        Should contain              ${result}         Location
-    # create an already existing bucket
-    ${result} =         Execute AWSS3APICli         create-bucket --bucket ${BUCKET}
-                        Should contain              ${result}         ${BUCKET}
-                        Should contain              ${result}         Location
-
-    Run Keyword if     '${OZONE_TEST}' == 'true'    Check Volume
-
-Head Bucket
-    ${result} =         Execute AWSS3APICli     head-bucket --bucket ${BUCKET}
-    ${result} =         Execute AWSS3APICli and checkrc      head-bucket --bucket ${NONEXIST-BUCKET}  255
-                        Should contain          ${result}    Not Found
-                        Should contain          ${result}    404
-Delete Bucket
-    ${result} =         Execute AWSS3APICli     head-bucket --bucket ${BUCKET}
-    ${result} =         Execute AWSS3APICli and checkrc      delete-bucket --bucket ${NONEXIST-BUCKET}  255
-                        Should contain          ${result}    NoSuchBucket
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot b/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot
deleted file mode 100644
index 1a93690..0000000
--- a/hadoop-ozone/dist/src/main/smoketest/s3/bucketv4.robot
+++ /dev/null
@@ -1,71 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-*** Settings ***
-Documentation       S3 gateway test with aws cli for bucket operations
-Library             String
-Library             OperatingSystem
-Resource            commonawslib.robot
-
-*** Variables ***
-${ENDPOINT_URL}       http://s3g:9878
-${OZONE_TEST}         true
-${BUCKET}             generated
-${NONEXIST-BUCKET}    generated1
-
-*** Keywords ***
-
-Install aws s3 cli
-                        Execute                    sudo apt-get install -y awscli
-                        Remove Environment Variable    AWS_ACCESS_KEY_ID
-                        Remove Environment Variable    AWS_SECRET_ACCESS_KEY
-                        Execute                    aws configure set default.s3.signature_version s3v4
-                        Execute                    aws configure set aws_access_key_id default1
-                        Execute                    aws configure set aws_secret_access_key defaultsecret
-                        Execute                    aws configure set region us-west-1
-    ${postfix1} =       Generate Random String  5  [NUMBERS]
-    Set Suite Variable  ${BUCKET}                  bucket-${postfix1}
-
-Check Volume
-    # as we know bucket to volume map. Volume name  bucket mapped is s3 + AWS_ACCESS_KEY_ID
-    ${result} =         Execute                     ozone sh volume info /s3default1
-                        Should contain              ${result}         s3default1
-                        Should not contain          ${result}         VOLUME_NOT_FOUND
-
-*** Test Cases ***
-
-Setup s3 Tests
-    Run Keyword if    '${OZONE_TEST}' == 'true'    Install aws s3 cli
-
-Create Bucket
-    ${result} =         Execute AWSS3APICli         create-bucket --bucket ${BUCKET}
-                        Should contain              ${result}         ${BUCKET}
-                        Should contain              ${result}         Location
-    # create an already existing bucket
-    ${result} =         Execute AWSS3APICli         create-bucket --bucket ${BUCKET}
-                        Should contain              ${result}         ${BUCKET}
-                        Should contain              ${result}         Location
-
-    Run Keyword if     '${OZONE_TEST}' == 'true'    Check Volume
-
-Head Bucket
-    ${result} =         Execute AWSS3APICli     head-bucket --bucket ${BUCKET}
-    ${result} =         Execute AWSS3APICli and checkrc      head-bucket --bucket ${NONEXIST-BUCKET}  255
-                        Should contain          ${result}    Not Found
-                        Should contain          ${result}    404
-Delete Bucket
-    ${result} =         Execute AWSS3APICli     head-bucket --bucket ${BUCKET}
-    ${result} =         Execute AWSS3APICli and checkrc      delete-bucket --bucket ${NONEXIST-BUCKET}  255
-                        Should contain          ${result}    NoSuchBucket
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot
index 07fa667..f426145 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/commonawslib.robot
@@ -16,6 +16,10 @@
 *** Settings ***
 Resource            ../commonlib.robot
 
+*** Variables ***
+${OZONE_S3_HEADER_VERSION}     v2
+${OZONE_S3_SET_CREDENTIALS}    true
+
 *** Keywords ***
 Execute AWSS3APICli
     [Arguments]       ${command}
@@ -23,6 +27,41 @@ Execute AWSS3APICli
     [return]          ${output}
 
 Execute AWSS3APICli and checkrc
-    [Arguments]       ${command}          ${expected_error_code}
+    [Arguments]       ${command}             ${expected_error_code}
     ${output} =       Execute and checkrc    aws s3api --endpoint-url ${ENDPOINT_URL} ${command}  ${expected_error_code}
-    [return]          ${output}
\ No newline at end of file
+    [return]          ${output}
+
+Execute AWSS3Cli
+    [Arguments]       ${command}
+    ${output} =       Execute          aws s3 --endpoint-url ${ENDPOINT_URL} ${command}
+    [return]          ${output}
+
+Install aws cli
+    ${rc}              ${output} =                 Run And Return Rc And Output           which apt-get
+    Run Keyword if     '${rc}' == '0'              Install aws cli s3 debian
+
+Install aws cli s3 debian
+    Execute                    sudo apt-get install -y awscli
+
+Setup v2 headers
+                        Set Environment Variable   AWS_ACCESS_KEY_ID       ANYID
+                        Set Environment Variable   AWS_SECRET_ACCESS_KEY   ANYKEY
+
+Setup v4 headers
+                        Execute                    aws configure set default.s3.signature_version s3v4
+                        Execute                    aws configure set aws_access_key_id default1
+                        Execute                    aws configure set aws_secret_access_key defaultsecret
+                        Execute                    aws configure set region us-west-1
+Create bucket
+    ${postfix} =         Generate Random String  5  [NUMBERS]
+    Set Suite Variable   ${BUCKET}                  bucket-${postfix}
+    Execute AWSS3APICli  create-bucket --bucket ${BUCKET}
+
+Setup credentials
+    Run Keyword if    '${OZONE_S3_HEADER_VERSION}' == 'v4'       Setup v4 headers
+    Run Keyword if    '${OZONE_S3_HEADER_VERSION}' != 'v4'       Setup v2 headers
+
+Setup s3 tests
+    Run Keyword        Install aws cli
+    Run Keyword if    '${OZONE_S3_SET_CREDENTIALS}' == 'true'    Setup credentials
+    Run Keyword if    '${BUCKET}' == 'generated'                 Create bucket

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot
new file mode 100644
index 0000000..9e57d50
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectdelete.robot
@@ -0,0 +1,72 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       S3 gateway test with aws cli
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            commonawslib.robot
+Test Setup          Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL}       http://s3g:9878
+${BUCKET}             generated
+
+*** Test Cases ***
+Delete file with s3api
+                        Execute                    date > /tmp/testfile
+    ${result} =         Execute AWSS3ApiCli        put-object --bucket ${BUCKET} --key deletetestapi/f1 --body /tmp/testfile
+    ${result} =         Execute AWSS3ApiCli        list-objects --bucket ${BUCKET} --prefix deletetestapi/
+                        Should contain             ${result}         f1
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key deletetestapi/f1
+    ${result} =         Execute AWSS3ApiCli        list-objects --bucket ${BUCKET} --prefix deletetestapi/
+                        Should not contain         ${result}         f1
+#In case of HTTP 500, the error code is printed out to the console.
+                        Should not contain         ${result}         500
+
+Delete file with s3api, file doesn't exist
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/
+                        Should not contain         ${result}         thereisnosuchfile
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key thereisnosuchfile
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/
+                        Should not contain         ${result}         thereisnosuchfile
+
+Delete dir with s3api
+                        Execute                    date > /tmp/testfile
+    ${result} =         Execute AWSS3Cli           cp /tmp/testfile s3://${BUCKET}/deletetestapidir/f1
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/deletetestapidir/
+                        Should contain             ${result}         f1
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key deletetestapidir/
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/deletetestapidir/
+                        Should contain             ${result}         f1
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key deletetestapidir/f1
+
+
+Delete file with s3api, file doesn't exist, prefix of a real file
+                        Execute                    date > /tmp/testfile
+    ${result} =         Execute AWSS3Cli           cp /tmp/testfile s3://${BUCKET}/deletetestapiprefix/filefile
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/deletetestapiprefix/
+                        Should contain             ${result}         filefile
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key deletetestapiprefix/file
+    ${result} =         Execute AWSS3Cli           ls s3://${BUCKET}/deletetestapiprefix/
+                        Should contain             ${result}         filefile
+    ${result} =         Execute AWSS3APICli        delete-object --bucket ${BUCKET} --key deletetestapiprefix/filefile
+
+
+
+Delete file with s3api, bucket doesn't exist
+    ${result} =         Execute AWSS3APICli and checkrc   delete-object --bucket ${BUCKET}-nosuchbucket --key f1      255
+                        Should contain                    ${result}         NoSuchBucket

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot
new file mode 100644
index 0000000..858e472
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/objectputget.robot
@@ -0,0 +1,42 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation       S3 gateway test with aws cli
+Library             OperatingSystem
+Library             String
+Resource            ../commonlib.robot
+Resource            commonawslib.robot
+Test Setup          Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL}       http://s3g:9878
+${OZONE_TEST}         true
+${BUCKET}             generated
+
+*** Test Cases ***
+
+Put object to s3
+                        Execute                    date > /tmp/testfile
+    ${result} =         Execute AWSS3ApiCli        put-object --bucket ${BUCKET} --key putobject/f1 --body /tmp/testfile
+    ${result} =         Execute AWSS3ApiCli        list-objects --bucket ${BUCKET} --prefix putobject/
+                        Should contain             ${result}         f1
+
+#This test depends on the previous test case. Can't be executed alone.
+Get object from s3
+    ${result} =                 Execute AWSS3ApiCli        get-object --bucket ${BUCKET} --key putobject/f1 /tmp/testfile.result
+    ${checksumbefore} =         Execute                    md5sum /tmp/testfile | awk '{print $1}'
+    ${checksumafter} =          Execute                    md5sum /tmp/testfile.result | awk '{print $1}'
+                                Should Be Equal            ${checksumbefore}            ${checksumafter}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/dist/src/main/smoketest/test.sh
----------------------------------------------------------------------
diff --git a/hadoop-ozone/dist/src/main/smoketest/test.sh b/hadoop-ozone/dist/src/main/smoketest/test.sh
index a6dc1f1..59903c6 100755
--- a/hadoop-ozone/dist/src/main/smoketest/test.sh
+++ b/hadoop-ozone/dist/src/main/smoketest/test.sh
@@ -43,9 +43,10 @@ execute_tests(){
   for TEST in "${TESTS[@]}"; do
      TITLE="Ozone $TEST tests with $COMPOSE_DIR cluster"
      set +e
-     docker-compose -f "$COMPOSE_FILE" exec datanode python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$COMPOSE_DIR-${TEST//\//_/}.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
+     OUTPUT_NAME="$COMPOSE_DIR-${TEST//\//_}"
+	  docker-compose -f "$COMPOSE_FILE" exec datanode python -m robot --log NONE --report NONE "${OZONE_ROBOT_OPTS[@]}" --output "smoketest/$RESULT_DIR/robot-$OUTPUT_NAME.xml" --logtitle "$TITLE" --reporttitle "$TITLE" "smoketest/$TEST"
      set -e
-     docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$COMPOSE_DIR-${TEST//\//_/}.log"
+     docker-compose -f "$COMPOSE_FILE" logs > "$DIR/$RESULT_DIR/docker-$OUTPUT_NAME.log"
   done
   if [ "$KEEP_RUNNING" = false ]; then
      docker-compose -f "$COMPOSE_FILE" down

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java
deleted file mode 100644
index daa75a9..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/EndpointBase.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.s3;
-
-import javax.inject.Inject;
-import javax.ws.rs.NotFoundException;
-import java.io.IOException;
-
-import org.apache.hadoop.ozone.client.OzoneBucket;
-import org.apache.hadoop.ozone.client.OzoneClient;
-import org.apache.hadoop.ozone.client.OzoneVolume;
-import org.apache.hadoop.ozone.s3.exception.OS3Exception;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable.Resource;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Basic helpers for all the REST endpoints.
- */
-public class EndpointBase {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(EndpointBase.class);
-  @Inject
-  private OzoneClient client;
-
-  protected OzoneBucket getBucket(String volumeName, String bucketName)
-      throws IOException {
-    return getVolume(volumeName).getBucket(bucketName);
-  }
-
-  protected OzoneBucket getBucket(OzoneVolume volume, String bucketName)
-      throws OS3Exception, IOException {
-    OzoneBucket bucket;
-    try {
-      bucket = volume.getBucket(bucketName);
-    } catch (IOException ex) {
-      LOG.error("Error occurred is {}", ex);
-      if (ex.getMessage().contains("NOT_FOUND")) {
-        OS3Exception oex =
-            S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, Resource.BUCKET);
-        throw oex;
-      } else {
-        throw ex;
-      }
-    }
-    return bucket;
-  }
-
-  protected OzoneVolume getVolume(String volumeName) throws IOException {
-    OzoneVolume volume = null;
-    try {
-      volume = client.getObjectStore().getVolume(volumeName);
-    } catch (Exception ex) {
-      if (ex.getMessage().contains("NOT_FOUND")) {
-        throw new NotFoundException("Volume " + volumeName + " is not found");
-      } else {
-        throw ex;
-      }
-    }
-    return volume;
-  }
-
-  /**
-   * Create an S3Bucket, and also it creates mapping needed to access via
-   * ozone and S3.
-   * @param userName
-   * @param bucketName
-   * @return location of the S3Bucket.
-   * @throws IOException
-   */
-  protected String createS3Bucket(String userName, String bucketName) throws
-      IOException {
-    try {
-      client.getObjectStore().createS3Bucket(userName, bucketName);
-    } catch (IOException ex) {
-      LOG.error("createS3Bucket error:", ex);
-      if (!ex.getMessage().contains("ALREADY_EXISTS")) {
-        // S3 does not return error for bucket already exists, it just
-        // returns the location.
-        throw ex;
-      }
-    }
-
-    // Not required to call as bucketname is same, but calling now in future
-    // if mapping changes we get right location.
-    String location = client.getObjectStore().getOzoneBucketName(
-        bucketName);
-    return "/"+location;
-  }
-
-  /**
-   * Deletes an s3 bucket and removes mapping of Ozone volume/bucket.
-   * @param s3BucketName - S3 Bucket Name.
-   * @throws  IOException in case the bucket cannot be deleted.
-   */
-  public void deleteS3Bucket(String s3BucketName)
-      throws IOException {
-    client.getObjectStore().deleteS3Bucket(s3BucketName);
-  }
-
-  /**
-   * Returns the Ozone Namespace for the S3Bucket. It will return the
-   * OzoneVolume/OzoneBucketName.
-   * @param s3BucketName  - S3 Bucket Name.
-   * @return String - The Ozone canonical name for this s3 bucket. This
-   * string is useful for mounting an OzoneFS.
-   * @throws IOException - Error is throw if the s3bucket does not exist.
-   */
-  public String getOzoneBucketMapping(String s3BucketName) throws IOException {
-    return client.getObjectStore().getOzoneBucketMapping(s3BucketName);
-  }
-
-  /**
-   * Returns the corresponding Ozone volume given an S3 Bucket.
-   * @param s3BucketName - S3Bucket Name.
-   * @return String - Ozone Volume name.
-   * @throws IOException - Throws if the s3Bucket does not exist.
-   */
-  public String getOzoneVolumeName(String s3BucketName) throws IOException {
-    return client.getObjectStore().getOzoneVolumeName(s3BucketName);
-  }
-
-  /**
-   * Returns the corresponding Ozone bucket name for the given S3 bucket.
-   * @param s3BucketName - S3Bucket Name.
-   * @return String - Ozone bucket Name.
-   * @throws IOException - Throws if the s3bucket does not exist.
-   */
-  public String getOzoneBucketName(String s3BucketName) throws IOException {
-    return client.getObjectStore().getOzoneBucketName(s3BucketName);
-  }
-
-
-  @VisibleForTesting
-  public void setClient(OzoneClient ozoneClient) {
-    this.client = ozoneClient;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java
deleted file mode 100644
index 42885e2..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/DeleteBucket.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.ozone.s3.bucket;
-
-import javax.ws.rs.DELETE;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-
-import org.apache.hadoop.ozone.s3.EndpointBase;
-import org.apache.hadoop.ozone.s3.exception.OS3Exception;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-
-import org.apache.http.HttpStatus;
-
-/**
- * Delete a bucket.
- */
-@Path("/{bucket}")
-public class DeleteBucket extends EndpointBase {
-
-  @DELETE
-  @Produces(MediaType.APPLICATION_XML)
-  public Response delete(@PathParam("bucket") String bucketName)
-      throws IOException, OS3Exception {
-
-    try {
-      deleteS3Bucket(bucketName);
-    } catch (IOException ex) {
-      if (ex.getMessage().contains("BUCKET_NOT_EMPTY")) {
-        OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
-            .BUCKET_NOT_EMPTY, S3ErrorTable.Resource.BUCKET);
-        throw os3Exception;
-      } else if (ex.getMessage().contains("BUCKET_NOT_FOUND")) {
-        OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
-            .NO_SUCH_BUCKET, S3ErrorTable.Resource.BUCKET);
-        throw os3Exception;
-      } else {
-        throw ex;
-      }
-    }
-
-    return Response
-        .status(HttpStatus.SC_NO_CONTENT)
-        .build();
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java
deleted file mode 100644
index 5ddc78c..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/HeadBucket.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.ozone.s3.bucket;
-
-import javax.ws.rs.HEAD;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-
-import org.apache.hadoop.ozone.s3.EndpointBase;
-import org.apache.hadoop.ozone.s3.exception.OS3Exception;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable.Resource;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Finds the bucket exists or not.
- */
-@Path("/{bucket}")
-public class HeadBucket extends EndpointBase {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(HeadBucket.class);
-
-  @HEAD
-  public Response head(@PathParam("bucket") String bucketName)
-      throws Exception {
-    try {
-      getVolume(getOzoneVolumeName(bucketName)).getBucket(bucketName);
-    } catch (IOException ex) {
-      LOG.error("Exception occurred in headBucket", ex);
-      if (ex.getMessage().contains("NOT_FOUND")) {
-        OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
-                .NO_SUCH_BUCKET, Resource.BUCKET);
-        throw os3Exception;
-      } else {
-        throw ex;
-      }
-    }
-    return Response.ok().build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java
deleted file mode 100644
index 962b8a6..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucket.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.s3.bucket;
-
-import org.apache.hadoop.ozone.client.OzoneBucket;
-import org.apache.hadoop.ozone.client.OzoneVolume;
-import org.apache.hadoop.ozone.s3.EndpointBase;
-import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata;
-import org.apache.hadoop.ozone.s3.exception.OS3Exception;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.*;
-import javax.ws.rs.core.MediaType;
-import java.io.IOException;
-import java.time.Instant;
-import java.util.Iterator;
-
-/**
- * List Object Rest endpoint.
- */
-@Path("/{volume}")
-public class ListBucket extends EndpointBase {
-  private static final Logger LOG =
-      LoggerFactory.getLogger(ListBucket.class);
-
-  @GET
-  @Produces(MediaType.APPLICATION_XML)
-  public ListBucketResponse get(@PathParam("volume") String volumeName)
-      throws OS3Exception, IOException {
-    OzoneVolume volume;
-    try {
-      volume = getVolume(volumeName);
-    } catch (NotFoundException ex) {
-      LOG.error("Exception occurred in ListBucket: volume {} not found.",
-          volumeName, ex);
-      OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
-          .NO_SUCH_VOLUME, S3ErrorTable.Resource.VOLUME);
-      throw os3Exception;
-    } catch (IOException e) {
-      throw e;
-    }
-
-    Iterator<? extends OzoneBucket> volABucketIter = volume.listBuckets(null);
-    ListBucketResponse response = new ListBucketResponse();
-
-    while(volABucketIter.hasNext()) {
-      OzoneBucket next = volABucketIter.next();
-      BucketMetadata bucketMetadata = new BucketMetadata();
-      bucketMetadata.setName(next.getName());
-      bucketMetadata.setCreationDate(
-          Instant.ofEpochMilli(next.getCreationTime()));
-      response.addBucket(bucketMetadata);
-    }
-
-    return response;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java
deleted file mode 100644
index 1f117dd..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/ListBucketResponse.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.ozone.s3.bucket;
-
-import com.google.common.annotations.VisibleForTesting;
-import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata;
-
-import javax.xml.bind.annotation.*;
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Response from the ListBucket RPC Call.
- */
-@XmlAccessorType(XmlAccessType.FIELD)
-@XmlRootElement(name = "ListAllMyBucketsResult",
-    namespace = "http://s3.amazonaws.com/doc/2006-03-01/")
-public class ListBucketResponse {
-  @XmlElementWrapper(name = "Buckets")
-  @XmlElement(name = "Bucket")
-  private List<BucketMetadata> buckets = new ArrayList<>();
-
-  public List<BucketMetadata> getBuckets() {
-    return buckets;
-  }
-
-  @VisibleForTesting
-  public int getBucketsNum() {
-    return buckets.size();
-  }
-
-  public void setBuckets(List<BucketMetadata> buckets) {
-    this.buckets = buckets;
-  }
-
-  public void addBucket(BucketMetadata bucket) {
-    buckets.add(bucket);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java
deleted file mode 100644
index bdb3c59..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/PutBucket.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.ozone.s3.bucket;
-
-import javax.ws.rs.PUT;
-import javax.ws.rs.Path;
-import javax.ws.rs.PathParam;
-import javax.ws.rs.core.Context;
-import javax.ws.rs.core.HttpHeaders;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-
-import org.apache.hadoop.ozone.s3.EndpointBase;
-import org.apache.hadoop.ozone.s3.exception.OS3Exception;
-import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV2;
-import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV4;
-import org.apache.http.HttpStatus;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Create new bucket.
- */
-@Path("/{bucket}")
-public class PutBucket extends EndpointBase {
-
-  private static final Logger LOG =
-      LoggerFactory.getLogger(PutBucket.class);
-
-  @PUT
-  public Response put(@PathParam("bucket") String bucketName, @Context
-                  HttpHeaders httpHeaders) throws IOException, OS3Exception {
-
-    String auth = httpHeaders.getHeaderString("Authorization");
-    LOG.info("Auth header string {}", auth);
-
-    if (auth == null) {
-      throw S3ErrorTable.newError(S3ErrorTable.MALFORMED_HEADER, S3ErrorTable
-          .Resource.HEADER);
-    }
-
-    String userName;
-    if (auth.startsWith("AWS4")) {
-      LOG.info("V4 Header {}", auth);
-      AuthorizationHeaderV4 authorizationHeader = new AuthorizationHeaderV4(
-          auth);
-      userName = authorizationHeader.getAccessKeyID().toLowerCase();
-    } else {
-      LOG.info("V2 Header {}", auth);
-      AuthorizationHeaderV2 authorizationHeader = new AuthorizationHeaderV2(
-          auth);
-      userName = authorizationHeader.getAccessKeyID().toLowerCase();
-    }
-
-    String location = createS3Bucket(userName, bucketName);
-
-    LOG.info("Location is {}", location);
-    return Response.status(HttpStatus.SC_OK).header("Location", location)
-       .build();
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java
deleted file mode 100644
index c099c69..0000000
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/bucket/package-info.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/**
- * Rest endpoint implementation for the bucket specific methods.
- */
-@javax.xml.bind.annotation.XmlSchema(
-    namespace = "http://s3.amazonaws"
-        + ".com/doc/2006-03-01/", elementFormDefault =
-    javax.xml.bind.annotation.XmlNsForm.QUALIFIED,
-    xmlns = {
-        @javax.xml.bind.annotation.XmlNs(namespaceURI = "http://s3.amazonaws"
-            + ".com/doc/2006-03-01/", prefix = "")})
-
-package org.apache.hadoop.ozone.s3.bucket;
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java
index 281e00b..cb04870 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/commontypes/IsoDateAdapter.java
@@ -31,7 +31,7 @@ public class IsoDateAdapter extends XmlAdapter<String, Instant> {
 
   public IsoDateAdapter() {
     iso8861Formatter =
-        DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mmX")
+        DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX")
             .withZone(ZoneOffset.UTC);
   }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java
new file mode 100644
index 0000000..7a7c92d
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/BucketEndpoint.java
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.s3.endpoint;
+
+import javax.ws.rs.DELETE;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.HEAD;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.Response.Status;
+import java.io.IOException;
+import java.time.Instant;
+import java.util.Iterator;
+
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneKey;
+import org.apache.hadoop.ozone.s3.commontypes.KeyMetadata;
+import org.apache.hadoop.ozone.s3.exception.OS3Exception;
+import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.http.HttpStatus;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Bucket level rest endpoints.
+ */
+@Path("/{bucket}")
+public class BucketEndpoint extends EndpointBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(BucketEndpoint.class);
+
+  /**
+   * Rest endpoint to list objects in a specific bucket.
+   * <p>
+   * See: https://docs.aws.amazon.com/AmazonS3/latest/API/v2-RESTBucketGET.html
+   * for more details.
+   */
+  @GET
+  public ListObjectResponse list(
+      @PathParam("bucket") String bucketName,
+      @QueryParam("delimiter") String delimiter,
+      @QueryParam("encoding-type") String encodingType,
+      @QueryParam("marker") String marker,
+      @DefaultValue("1000") @QueryParam("max-keys") int maxKeys,
+      @QueryParam("prefix") String prefix,
+      @Context HttpHeaders hh) throws OS3Exception, IOException {
+
+    if (delimiter == null) {
+      delimiter = "/";
+    }
+    if (prefix == null) {
+      prefix = "";
+    }
+
+    OzoneBucket bucket = getBucket(bucketName);
+
+    Iterator<? extends OzoneKey> ozoneKeyIterator = bucket.listKeys(prefix);
+
+    ListObjectResponse response = new ListObjectResponse();
+    response.setDelimiter(delimiter);
+    response.setName(bucketName);
+    response.setPrefix(prefix);
+    response.setMarker("");
+    response.setMaxKeys(1000);
+    response.setEncodingType("url");
+    response.setTruncated(false);
+
+    String prevDir = null;
+    while (ozoneKeyIterator.hasNext()) {
+      OzoneKey next = ozoneKeyIterator.next();
+      String relativeKeyName = next.getName().substring(prefix.length());
+
+      int depth =
+          StringUtils.countMatches(relativeKeyName, delimiter);
+
+      if (prefix.length() > 0 && !prefix.endsWith(delimiter)
+          && relativeKeyName.length() > 0) {
+        response.addPrefix(prefix + "/");
+        break;
+      }
+      if (depth > 0) {
+        String dirName = relativeKeyName
+            .substring(0, relativeKeyName.indexOf(delimiter));
+        if (!dirName.equals(prevDir)) {
+          response.addPrefix(
+              prefix + dirName + delimiter);
+          prevDir = dirName;
+        }
+      } else if (relativeKeyName.endsWith(delimiter)) {
+        response.addPrefix(relativeKeyName);
+      } else if (relativeKeyName.length() > 0) {
+        KeyMetadata keyMetadata = new KeyMetadata();
+        keyMetadata.setKey(next.getName());
+        keyMetadata.setSize(next.getDataSize());
+        keyMetadata.setETag("" + next.getModificationTime());
+        keyMetadata.setStorageClass("STANDARD");
+        keyMetadata
+            .setLastModified(Instant.ofEpochMilli(next.getModificationTime()));
+        response.addKey(keyMetadata);
+      }
+    }
+    response.setKeyCount(
+        response.getCommonPrefixes().size() + response.getContents().size());
+    return response;
+  }
+
+  @PUT
+  public Response put(@PathParam("bucket") String bucketName, @Context
+      HttpHeaders httpHeaders) throws IOException, OS3Exception {
+
+    String userName = parseUsername(httpHeaders);
+
+    String location = createS3Bucket(userName, bucketName);
+
+    LOG.info("Location is {}", location);
+    return Response.status(HttpStatus.SC_OK).header("Location", location)
+        .build();
+
+  }
+
+  /**
+   * Rest endpoint to check the existence of a bucket.
+   * <p>
+   * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketHEAD.html
+   * for more details.
+   */
+  @HEAD
+  public Response head(@PathParam("bucket") String bucketName)
+      throws OS3Exception, IOException {
+    try {
+      getBucket(bucketName);
+    } catch (OS3Exception ex) {
+      LOG.error("Exception occurred in headBucket", ex);
+      //TODO: use a subclass of OS3Exception and catch it here.
+      if (ex.getCode().contains("NoSuchBucket")) {
+        return Response.status(Status.BAD_REQUEST).build();
+      } else {
+        throw ex;
+      }
+    }
+    return Response.ok().build();
+  }
+
+  /**
+   * Rest endpoint to delete specific bucket.
+   * <p>
+   * See: https://docs.aws.amazon.com/AmazonS3/latest/API/RESTBucketDELETE.html
+   * for more details.
+   */
+  @DELETE
+  public Response delete(@PathParam("bucket") String bucketName)
+      throws IOException, OS3Exception {
+
+    try {
+      deleteS3Bucket(bucketName);
+    } catch (IOException ex) {
+      if (ex.getMessage().contains("BUCKET_NOT_EMPTY")) {
+        OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
+            .BUCKET_NOT_EMPTY, S3ErrorTable.Resource.BUCKET);
+        throw os3Exception;
+      } else if (ex.getMessage().contains("BUCKET_NOT_FOUND")) {
+        OS3Exception os3Exception = S3ErrorTable.newError(S3ErrorTable
+            .NO_SUCH_BUCKET, S3ErrorTable.Resource.BUCKET);
+        throw os3Exception;
+      } else {
+        throw ex;
+      }
+    }
+
+    return Response
+        .status(HttpStatus.SC_NO_CONTENT)
+        .build();
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
new file mode 100644
index 0000000..61f066c
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/EndpointBase.java
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ozone.s3.endpoint;
+
+import javax.inject.Inject;
+import javax.ws.rs.NotFoundException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.HttpHeaders;
+import java.io.IOException;
+import java.util.Locale;
+
+import org.apache.hadoop.ozone.client.OzoneBucket;
+import org.apache.hadoop.ozone.client.OzoneClient;
+import org.apache.hadoop.ozone.client.OzoneVolume;
+import org.apache.hadoop.ozone.s3.exception.OS3Exception;
+import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+import org.apache.hadoop.ozone.s3.exception.S3ErrorTable.Resource;
+import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV2;
+import org.apache.hadoop.ozone.s3.header.AuthorizationHeaderV4;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Basic helpers for all the REST endpoints.
+ */
+public class EndpointBase {
+
+  private static final Logger LOG =
+      LoggerFactory.getLogger(EndpointBase.class);
+
+  @Inject
+  private OzoneClient client;
+
+  protected OzoneBucket getBucket(String volumeName, String bucketName)
+      throws IOException {
+    return getVolume(volumeName).getBucket(bucketName);
+  }
+
+  protected OzoneBucket getBucket(OzoneVolume volume, String bucketName)
+      throws OS3Exception, IOException {
+    OzoneBucket bucket;
+    try {
+      bucket = volume.getBucket(bucketName);
+    } catch (IOException ex) {
+      LOG.error("Error occurred is {}", ex);
+      if (ex.getMessage().contains("NOT_FOUND")) {
+        OS3Exception oex =
+            S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, Resource.BUCKET);
+        throw oex;
+      } else {
+        throw ex;
+      }
+    }
+    return bucket;
+  }
+
+  protected OzoneBucket getBucket(String bucketName)
+      throws OS3Exception, IOException {
+    OzoneBucket bucket;
+    try {
+      OzoneVolume volume = getVolume(getOzoneVolumeName(bucketName));
+      bucket = volume.getBucket(bucketName);
+    } catch (IOException ex) {
+      LOG.error("Error occurred is {}", ex);
+      if (ex.getMessage().contains("NOT_FOUND")) {
+        OS3Exception oex =
+            S3ErrorTable.newError(S3ErrorTable.NO_SUCH_BUCKET, Resource.BUCKET);
+        throw oex;
+      } else {
+        throw ex;
+      }
+    }
+    return bucket;
+  }
+
+  protected OzoneVolume getVolume(String volumeName) throws IOException {
+    OzoneVolume volume = null;
+    try {
+      volume = client.getObjectStore().getVolume(volumeName);
+    } catch (Exception ex) {
+      if (ex.getMessage().contains("NOT_FOUND")) {
+        throw new NotFoundException("Volume " + volumeName + " is not found");
+      } else {
+        throw ex;
+      }
+    }
+    return volume;
+  }
+
+  /**
+   * Create an S3Bucket, and also it creates mapping needed to access via
+   * ozone and S3.
+   * @param userName
+   * @param bucketName
+   * @return location of the S3Bucket.
+   * @throws IOException
+   */
+  protected String createS3Bucket(String userName, String bucketName) throws
+      IOException {
+    try {
+      client.getObjectStore().createS3Bucket(userName, bucketName);
+    } catch (IOException ex) {
+      LOG.error("createS3Bucket error:", ex);
+      if (!ex.getMessage().contains("ALREADY_EXISTS")) {
+        // S3 does not return error for bucket already exists, it just
+        // returns the location.
+        throw ex;
+      }
+    }
+
+    // Not required to call as bucketname is same, but calling now in future
+    // if mapping changes we get right location.
+    String location = client.getObjectStore().getOzoneBucketName(
+        bucketName);
+    return "/"+location;
+  }
+
+  /**
+   * Deletes an s3 bucket and removes mapping of Ozone volume/bucket.
+   * @param s3BucketName - S3 Bucket Name.
+   * @throws  IOException in case the bucket cannot be deleted.
+   */
+  public void deleteS3Bucket(String s3BucketName)
+      throws IOException {
+    client.getObjectStore().deleteS3Bucket(s3BucketName);
+  }
+
+  /**
+   * Returns the Ozone Namespace for the S3Bucket. It will return the
+   * OzoneVolume/OzoneBucketName.
+   * @param s3BucketName  - S3 Bucket Name.
+   * @return String - The Ozone canonical name for this s3 bucket. This
+   * string is useful for mounting an OzoneFS.
+   * @throws IOException - Error is throw if the s3bucket does not exist.
+   */
+  public String getOzoneBucketMapping(String s3BucketName) throws IOException {
+    return client.getObjectStore().getOzoneBucketMapping(s3BucketName);
+  }
+
+  /**
+   * Returns the corresponding Ozone volume given an S3 Bucket.
+   * @param s3BucketName - S3Bucket Name.
+   * @return String - Ozone Volume name.
+   * @throws IOException - Throws if the s3Bucket does not exist.
+   */
+  public String getOzoneVolumeName(String s3BucketName) throws IOException {
+    return client.getObjectStore().getOzoneVolumeName(s3BucketName);
+  }
+
+  /**
+   * Returns the corresponding Ozone bucket name for the given S3 bucket.
+   * @param s3BucketName - S3Bucket Name.
+   * @return String - Ozone bucket Name.
+   * @throws IOException - Throws if the s3bucket does not exist.
+   */
+  public String getOzoneBucketName(String s3BucketName) throws IOException {
+    return client.getObjectStore().getOzoneBucketName(s3BucketName);
+  }
+
+  /**
+   * Retrieve the username based on the authorization header.
+   *
+   * @param httpHeaders
+   * @return Identified username
+   * @throws OS3Exception
+   */
+  public String parseUsername(
+      @Context HttpHeaders httpHeaders) throws OS3Exception {
+    String auth = httpHeaders.getHeaderString("Authorization");
+    LOG.info("Auth header string {}", auth);
+
+    if (auth == null) {
+      throw S3ErrorTable
+          .newError(S3ErrorTable.MALFORMED_HEADER, Resource.HEADER);
+    }
+
+    String userName;
+    if (auth.startsWith("AWS4")) {
+      LOG.info("V4 Header {}", auth);
+      AuthorizationHeaderV4 authorizationHeader = new AuthorizationHeaderV4(
+          auth);
+      userName = authorizationHeader.getAccessKeyID().toLowerCase();
+    } else {
+      LOG.info("V2 Header {}", auth);
+      AuthorizationHeaderV2 authorizationHeader = new AuthorizationHeaderV2(
+          auth);
+      userName = authorizationHeader.getAccessKeyID().toLowerCase();
+    }
+    return userName;
+  }
+
+  @VisibleForTesting
+  public void setClient(OzoneClient ozoneClient) {
+    this.client = ozoneClient;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java
new file mode 100644
index 0000000..b9f8702
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListBucketResponse.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3.endpoint;
+
+import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.ozone.s3.commontypes.BucketMetadata;
+
+import javax.xml.bind.annotation.*;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * Response from the ListBucket RPC Call.
+ * <p>
+ * Marshalled as the ListAllMyBucketsResult XML document of the AWS REST
+ * API, with every owned bucket nested under the Buckets wrapper element.
+ */
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlRootElement(name = "ListAllMyBucketsResult",
+    namespace = "http://s3.amazonaws.com/doc/2006-03-01/")
+public class ListBucketResponse {
+
+  // Serialized as <Buckets><Bucket>...</Bucket>...</Buckets>.
+  @XmlElementWrapper(name = "Buckets")
+  @XmlElement(name = "Bucket")
+  private List<BucketMetadata> buckets = new ArrayList<>();
+
+  public List<BucketMetadata> getBuckets() {
+    return buckets;
+  }
+
+  public void setBuckets(List<BucketMetadata> buckets) {
+    this.buckets = buckets;
+  }
+
+  /** Appends one bucket entry to the listing. */
+  public void addBucket(BucketMetadata bucket) {
+    buckets.add(bucket);
+  }
+
+  /** Number of buckets currently held in the listing. */
+  @VisibleForTesting
+  public int getBucketsNum() {
+    return buckets.size();
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/0c2914e5/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java
----------------------------------------------------------------------
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java
new file mode 100644
index 0000000..b9ab977
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ListObjectResponse.java
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3.endpoint;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.ozone.s3.commontypes.CommonPrefix;
+import org.apache.hadoop.ozone.s3.commontypes.KeyMetadata;
+
+/**
+ * Response from the ListObject RPC Call.
+ * <p>
+ * Marshalled as the ListBucketResult XML document of the AWS REST API
+ * (GET Bucket / list objects): the matched keys plus the common prefixes
+ * collapsed at the delimiter.
+ */
+@XmlAccessorType(XmlAccessType.FIELD)
+@XmlRootElement(name = "ListBucketResult", namespace = "http://s3.amazonaws"
+    + ".com/doc/2006-03-01/")
+public class ListObjectResponse {
+
+  // Name of the listed bucket.
+  @XmlElement(name = "Name")
+  private String name;
+
+  // Prefix the listing was restricted to, if any.
+  @XmlElement(name = "Prefix")
+  private String prefix;
+
+  // Key to start the listing after (pagination).
+  @XmlElement(name = "Marker")
+  private String marker;
+
+  // Upper bound on the number of returned keys.
+  @XmlElement(name = "MaxKeys")
+  private int maxKeys;
+
+  // Number of keys actually returned.
+  @XmlElement(name = "KeyCount")
+  private int keyCount;
+
+  // Keys sharing a prefix up to this character are rolled up into
+  // CommonPrefixes.
+  @XmlElement(name = "Delimiter")
+  private String delimiter = "/";
+
+  @XmlElement(name = "EncodingType")
+  private String encodingType = "url";
+
+  // True when the result was cut off at maxKeys.
+  @XmlElement(name = "IsTruncated")
+  private boolean isTruncated;
+
+  @XmlElement(name = "Contents")
+  private List<KeyMetadata> contents = new ArrayList<>();
+
+  @XmlElement(name = "CommonPrefixes")
+  private List<CommonPrefix> commonPrefixes = new ArrayList<>();
+
+  public String getName() {
+    return name;
+  }
+
+  public String getPrefix() {
+    return prefix;
+  }
+
+  public String getMarker() {
+    return marker;
+  }
+
+  public int getMaxKeys() {
+    return maxKeys;
+  }
+
+  public int getKeyCount() {
+    return keyCount;
+  }
+
+  public String getDelimiter() {
+    return delimiter;
+  }
+
+  public String getEncodingType() {
+    return encodingType;
+  }
+
+  public boolean isTruncated() {
+    return isTruncated;
+  }
+
+  public List<KeyMetadata> getContents() {
+    return contents;
+  }
+
+  public List<CommonPrefix> getCommonPrefixes() {
+    return commonPrefixes;
+  }
+
+  public void setName(String name) {
+    this.name = name;
+  }
+
+  public void setPrefix(String prefix) {
+    this.prefix = prefix;
+  }
+
+  public void setMarker(String marker) {
+    this.marker = marker;
+  }
+
+  public void setMaxKeys(int maxKeys) {
+    this.maxKeys = maxKeys;
+  }
+
+  public void setKeyCount(int keyCount) {
+    this.keyCount = keyCount;
+  }
+
+  public void setDelimiter(String delimiter) {
+    this.delimiter = delimiter;
+  }
+
+  public void setEncodingType(String encodingType) {
+    this.encodingType = encodingType;
+  }
+
+  public void setTruncated(boolean truncated) {
+    isTruncated = truncated;
+  }
+
+  public void setContents(
+      List<KeyMetadata> contents) {
+    this.contents = contents;
+  }
+
+  public void setCommonPrefixes(
+      List<CommonPrefix> commonPrefixes) {
+    this.commonPrefixes = commonPrefixes;
+  }
+
+  /** Appends one key entry to the listing. */
+  public void addKey(KeyMetadata keyMetadata) {
+    contents.add(keyMetadata);
+  }
+
+  /** Appends one rolled-up common prefix to the listing. */
+  public void addPrefix(String relativeKeyName) {
+    commonPrefixes.add(new CommonPrefix(relativeKeyName));
+  }
+}


---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org


Mime
View raw message