HCP S3 Code Sample

Document created by Michael Ratner on Oct 13, 2017
Version 1

/*
 * Copyright 2010-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except
 * in compliance with the License. A copy of the License is located at http://aws.amazon.com/apache2.0
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 */

package com.amazonaws.samples;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.util.UUID;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.amazonaws.services.s3.model.Bucket;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.ListObjectsRequest;
import com.amazonaws.services.s3.model.ObjectListing;
import com.amazonaws.services.s3.model.PutObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import com.amazonaws.services.s3.model.S3ObjectSummary;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.codec.digest.DigestUtils;

import com.amazonaws.services.s3.transfer.TransferManager;
import com.amazonaws.services.s3.transfer.Upload;

/*
 * This sample demonstrates how to make basic requests to Amazon S3 using the AWS SDK for Java.
 * Prerequisites: You must have a valid Amazon Web Services developer account, and be signed up
 * to use Amazon S3. For more information on Amazon S3, see http://aws.amazon.com/s3.
 *
 * Important: Be sure to fill in your AWS access credentials in ~/.aws/credentials
 * (C:\Users\USER_NAME\.aws\credentials for Windows users) before you try to run this sample.
 *
 * Note: this HCP variant supplies credentials directly through BasicAWSCredentials below
 * rather than reading them from the credentials file.
 */
public class S3Sample {

    public static void main(String[] args) throws IOException {
        /*
         * Create your credentials file at ~/.aws/credentials
         * (C:\Users\USER_NAME\.aws\credentials for Windows users)
         * and save the following lines after replacing the underlined values with your own.
         *
         * [default]
         * aws_access_key_id = YOUR_ACCESS_KEY_ID
         * aws_secret_access_key = YOUR_SECRET_ACCESS_KEY
         */

        // Uncomment to enable full JSSE (SSL/TLS) debug logging.
//        if (true) {
//            System.setProperty("javax.net.debug","all");
//        }

        /*
         * Initialize access credentials for the HCP S3 (hs3) client. HCP derives the
         * access key from the Base64-encoded username and the secret key from the
         * MD5 hash of the password, for example:
         */
//      String accessKey = Base64.encodeBase64String(username.getBytes());
//      String secretKey = DigestUtils.md5Hex(password);
        String accessKey = "dXNlcjE=";                          // Base64 of "user1"
        String secretKey = "bed128365216c019988915ed3add75fb";  // MD5 hex of "passw0rd"
        /*
         * Set up the client configuration to allow for 200 max HTTP
         * connections, as this is an HCP best practice.
         */
        ClientConfiguration myClientConfig = new ClientConfiguration();
        myClientConfig.setMaxConnections(200);
        myClientConfig.setConnectionTimeout(5000);   // 5-second connection timeout, for testing only

        /*
         * Use the HTTP protocol to communicate with HCP. To use HTTPS instead,
         * HCP may need to present a trusted SSL server certificate.
         */
        myClientConfig.setProtocol(Protocol.HTTP);
//        myClientConfig.withSignerOverride("S3SignerType");

        /*
         * Build the hs3 client to be used for communication with HCP.
         */
        AmazonS3Client s3 = new AmazonS3Client(new BasicAWSCredentials(accessKey, secretKey), myClientConfig);
        s3.setS3ClientOptions(new S3ClientOptions().withPathStyleAccess(true));
        // Set up the service point to be the tenant in HCP.
        s3.setEndpoint("ten1.cluster59l-vm4.lab.archivas.com");

        String bucketName = "my-first-s3-bucket-" + UUID.randomUUID();
        String key = "MyObjectKey";

        System.out.println("===========================================");
        System.out.println("Getting Started with Amazon S3");
        System.out.println("===========================================\n");

        try {
            /*
             * Create a new S3 bucket - Amazon S3 bucket names are globally unique,
             * so once a bucket name has been taken by any user, you can't create
             * another bucket with that same name.
             *
             * You can optionally specify a location for your bucket if you want to
             * keep your data closer to your applications or users.
             */

            System.out.println("Creating bucket " + bucketName + "\n");
            s3.createBucket(bucketName);

            /*
             * List the buckets in your account
             */

            System.out.println("Listing buckets");
            for (Bucket bucket : s3.listBuckets()) {
                System.out.println(" - " + bucket.getName());
            }
            System.out.println();

            /*
             * Upload an object to your bucket - You can easily upload a file to
             * S3, or upload an InputStream directly if you know the length of
             * the data in the stream. You can also specify your own metadata
             * when uploading to S3, which allows you to set a variety of options
             * like content-type and content-encoding, plus additional metadata
             * specific to your applications.
             */

            System.out.println("Uploading a new object to S3 from a file\n");
            File mytestfile = new File("/Users/japark/Development/LargeTestFile.txt");
            //single part
            PutObjectRequest por = new PutObjectRequest(bucketName, key, mytestfile);
            s3.putObject(por);
//multi
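
            /*
             * Sketch: uploading from an InputStream with user-defined metadata,
             * assuming an additional import of com.amazonaws.services.s3.model.ObjectMetadata.
             * Setting the content length lets the SDK stream the data instead of
             * buffering it in memory.
             */
//            ObjectMetadata meta = new ObjectMetadata();
//            meta.setContentType("text/plain");
//            meta.setContentLength(mytestfile.length());
//            s3.putObject(new PutObjectRequest(bucketName, key + "-stream",
//                    new java.io.FileInputStream(mytestfile), meta));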
            System.out.println();
            System.out.println("--- first put finished---");
            System.out.println();
//          s3.putObject(por);
//          System.out.println("--- second put finished---");
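
            /*
             * Sketch: multipart upload with the TransferManager imported above,
             * assuming the HCP release in use supports S3 multipart uploads.
             * TransferManager splits large files into parts and uploads them in parallel.
             */
//            TransferManager tm = new TransferManager(s3);
//            Upload upload = tm.upload(bucketName, key + "-multipart", mytestfile);
//            try {
//                upload.waitForCompletion();
//            } catch (InterruptedException ie) {
//                Thread.currentThread().interrupt();
//            }
//            tm.shutdownNow(false);   // keep the shared s3 client open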

            /*
             * Download an object - When you download an object, you get all of
             * the object's metadata and a stream from which to read the contents.
             * It's important to read the contents of the stream as quickly as
             * possible since the data is streamed directly from Amazon S3 and your
             * network connection will remain open until you read all the data or
             * close the input stream.
             *
             * GetObjectRequest also supports several other options, including
             * conditional downloading of objects based on modification times,
             * ETags, and selectively downloading a range of an object.
             */

//            System.out.println("Downloading an object");
//            S3Object object = s3.getObject(new GetObjectRequest(bucketName, key));
//            System.out.println("Content-Type: "  + object.getObjectMetadata().getContentType());
//            displayTextInputStream(object.getObjectContent());
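
            /*
             * Sketch: ranged download - GetObjectRequest.withRange(start, end) fetches
             * only the requested bytes, useful for sampling or resuming large objects.
             */
//            S3Object firstKb = s3.getObject(new GetObjectRequest(bucketName, key).withRange(0, 1023));
//            displayTextInputStream(firstKb.getObjectContent());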

            /*
             * List objects in your bucket by prefix - There are many options for
             * listing the objects in your bucket.  Keep in mind that buckets with
             * many objects might truncate their results when listing their objects,
             * so be sure to check if the returned object listing is truncated, and
             * use the AmazonS3.listNextBatchOfObjects(...) operation to retrieve
             * additional results.
             */

//            System.out.println("Listing objects");
//            ObjectListing objectListing = s3.listObjects(new ListObjectsRequest()
//                    .withBucketName(bucketName)
//                    .withPrefix("My"));
//            for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
//                System.out.println(" - " + objectSummary.getKey() + "  " +
//                        "(size = " + objectSummary.getSize() + ")");
//            }
//            System.out.println();
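
            /*
             * Sketch: paging through a truncated listing - keep fetching batches
             * with listNextBatchOfObjects(...) until isTruncated() returns false.
             */
//            while (objectListing.isTruncated()) {
//                objectListing = s3.listNextBatchOfObjects(objectListing);
//                for (S3ObjectSummary objectSummary : objectListing.getObjectSummaries()) {
//                    System.out.println(" - " + objectSummary.getKey());
//                }
//            }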

            /*
             * Delete an object - Unless versioning has been turned on for your bucket,
             * there is no way to undelete an object, so use caution when deleting objects.
             */

//            System.out.println("Deleting an object\n");
//            s3.deleteObject(bucketName, key);

            /*
             * Delete a bucket - A bucket must be completely empty before it can be
             * deleted, so remember to delete any objects from your buckets before
             * you try to delete them.
             */
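
            /*
             * Sketch: emptying the bucket first - delete every key returned by the
             * listing (paging through truncated results as shown above) before
             * calling deleteBucket().
             */
//            for (S3ObjectSummary summary : s3.listObjects(bucketName).getObjectSummaries()) {
//                s3.deleteObject(bucketName, summary.getKey());
//            }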

//            System.out.println("Deleting bucket " + bucketName + "\n");
//            s3.deleteBucket(bucketName);
//        } catch (AmazonServiceException ase) {
//            System.out.println("Caught an AmazonServiceException, which means your request made it "
//                    + "to Amazon S3, but was rejected with an error response for some reason.");
//            System.out.println("Error Message:    " + ase.getMessage());
//            System.out.println("HTTP Status Code: " + ase.getStatusCode());
//            System.out.println("AWS Error Code:   " + ase.getErrorCode());
//            System.out.println("Error Type:       " + ase.getErrorType());
//            System.out.println("Request ID:       " + ase.getRequestId());
        } catch (AmazonClientException ace) {
            System.out.println("Caught an AmazonClientException, which means the client encountered "
                    + "a serious internal problem while trying to communicate with S3, "
                    + "such as not being able to access the network.");
            System.out.println("Error Message: " + ace.getMessage());
        }
    }

    /**
     * Creates a temporary file with text data to demonstrate uploading a file
     * to Amazon S3
     *
     * @return A newly created temporary file with text data.
     *
     * @throws IOException
     */
    private static File createSampleFile() throws IOException {
        File file = File.createTempFile("aws-java-sdk-", ".txt");
        file.deleteOnExit();

        Writer writer = new OutputStreamWriter(new FileOutputStream(file));
        for (int idx = 0; idx < 10; idx++) {
            writer.write("abcdefghijklmnopqrstuvwxyz\n");
            writer.write("01234567890112345678901234\n");
            writer.write("!@#$%^&*()-=[]{};':',.<>/?\n");
            writer.write("01234567890112345678901234\n");
            writer.write("abcdefghijklmnopqrstuvwxyz\n");
        }
        writer.close();
        return file;
    }

    /**
     * Displays the contents of the specified input stream as text.
     *
     * @param input
     *     The input stream to display as text.
     *
     * @throws IOException
     */
    private static void displayTextInputStream(InputStream input) throws IOException {
        BufferedReader reader = new BufferedReader(new InputStreamReader(input));
        while (true) {
            String line = reader.readLine();
            if (line == null) break;
            System.out.println("    " + line);
        }
        System.out.println();
    }
}
