
Upload a stream of unknown size to an Amazon S3 object using an AWS SDK

The following code examples show how to upload a stream of unknown size to an Amazon S3 object.

Java
SDK for Java 2.x
Note

There's more on GitHub. Find the complete example and learn how to set up and run in the AWS Code Examples Repository.

Use the AWS CRT-based S3 client.

import com.example.s3.util.AsyncExampleUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.core.async.AsyncRequestBody;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.PutObjectResponse;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PutObjectFromStreamAsync {
    private static final Logger logger = LoggerFactory.getLogger(PutObjectFromStreamAsync.class);

    public static void main(String[] args) {
        String bucketName = "amzn-s3-demo-bucket-" + UUID.randomUUID(); // Change bucket name.
        String key = UUID.randomUUID().toString();
        AsyncExampleUtils.createBucket(bucketName);
        try {
            PutObjectFromStreamAsync example = new PutObjectFromStreamAsync();
            S3AsyncClient s3AsyncClientCrt = S3AsyncClient.crtCreate();
            PutObjectResponse putObjectResponse = example.putObjectFromStreamCrt(s3AsyncClientCrt, bucketName, key);
            logger.info("Object {} etag: {}", key, putObjectResponse.eTag());
            logger.info("Object {} uploaded to bucket {}.", key, bucketName);
        } catch (SdkException e) {
            logger.error(e.getMessage(), e);
        } finally {
            AsyncExampleUtils.deleteObject(bucketName, key);
            AsyncExampleUtils.deleteBucket(bucketName);
        }
    }

    /**
     * @param s3CrtAsyncClient - To upload content from a stream of unknown size, you can use the AWS CRT-based S3 client.
     * @param bucketName - The name of the bucket.
     * @param key - The name of the object.
     * @return software.amazon.awssdk.services.s3.model.PutObjectResponse - Returns metadata pertaining to the put object operation.
     */
    public PutObjectResponse putObjectFromStreamCrt(S3AsyncClient s3CrtAsyncClient, String bucketName, String key) {
        // AsyncExampleUtils.randomString() returns a random string up to 100 characters.
        String randomString = AsyncExampleUtils.randomString();
        logger.info("random string to upload: {}: length={}", randomString, randomString.length());
        InputStream inputStream = new ByteArrayInputStream(randomString.getBytes());

        // An executor is required so the InputStream is read on a separate thread and the upload is not blocked.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Specify `null` for the content length when you don't know the content length.
        AsyncRequestBody body = AsyncRequestBody.fromInputStream(inputStream, null, executor);

        CompletableFuture<PutObjectResponse> responseFuture =
                s3CrtAsyncClient.putObject(r -> r.bucket(bucketName).key(key), body);

        PutObjectResponse response = responseFuture.join(); // Wait for the response.
        logger.info("Object {} uploaded to bucket {}.", key, bucketName);

        executor.shutdown();
        return response;
    }
}
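
The example above calls S3AsyncClient.crtCreate(), which uses default settings. If you want to tune the CRT-based client, you can build it explicitly with crtBuilder(). The following is a minimal sketch, not part of the published example; the Region, throughput, and part-size values are illustrative assumptions, not recommendations.

import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.s3.S3AsyncClient;

public class CrtClientConfigSketch {
    public static void main(String[] args) {
        // A minimal sketch: build the CRT-based client explicitly instead of using
        // S3AsyncClient.crtCreate(). The Region and tuning values below are
        // illustrative assumptions, not values from the example above.
        S3AsyncClient s3CrtClient = S3AsyncClient.crtBuilder()
                .region(Region.US_EAST_1)                     // Assumed Region; use your own.
                .targetThroughputInGbps(20.0)                 // Tune for your network.
                .minimumPartSizeInBytes(8L * 1024 * 1024)     // 8 MiB parts.
                .build();

        // Pass s3CrtClient to putObjectFromStreamCrt(...) exactly as in the example above.
        s3CrtClient.close();
    }
}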

Use the standard asynchronous S3 client with multipart upload enabled.

import com.example.s3.util.AsyncExampleUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.core.async.AsyncRequestBody;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.model.PutObjectResponse;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class PutObjectFromStreamAsyncMp {
    private static final Logger logger = LoggerFactory.getLogger(PutObjectFromStreamAsyncMp.class);

    public static void main(String[] args) {
        String bucketName = "amzn-s3-demo-bucket-" + UUID.randomUUID(); // Change bucket name.
        String key = UUID.randomUUID().toString();
        AsyncExampleUtils.createBucket(bucketName);
        try {
            PutObjectFromStreamAsyncMp example = new PutObjectFromStreamAsyncMp();
            S3AsyncClient s3AsyncClientMp = S3AsyncClient.builder().multipartEnabled(true).build();
            PutObjectResponse putObjectResponse = example.putObjectFromStreamMp(s3AsyncClientMp, bucketName, key);
            logger.info("Object {} etag: {}", key, putObjectResponse.eTag());
            logger.info("Object {} uploaded to bucket {}.", key, bucketName);
        } catch (SdkException e) {
            logger.error(e.getMessage(), e);
        } finally {
            AsyncExampleUtils.deleteObject(bucketName, key);
            AsyncExampleUtils.deleteBucket(bucketName);
        }
    }

    /**
     * @param s3AsyncClientMp - To upload content from a stream of unknown size, you can use the S3 asynchronous client with multipart enabled.
     * @param bucketName - The name of the bucket.
     * @param key - The name of the object.
     * @return software.amazon.awssdk.services.s3.model.PutObjectResponse - Returns metadata pertaining to the put object operation.
     */
    public PutObjectResponse putObjectFromStreamMp(S3AsyncClient s3AsyncClientMp, String bucketName, String key) {
        // AsyncExampleUtils.randomString() returns a random string up to 100 characters.
        String randomString = AsyncExampleUtils.randomString();
        logger.info("random string to upload: {}: length={}", randomString, randomString.length());
        InputStream inputStream = new ByteArrayInputStream(randomString.getBytes());

        // An executor is required so the InputStream is read on a separate thread and the upload is not blocked.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Specify `null` for the content length when you don't know the content length.
        AsyncRequestBody body = AsyncRequestBody.fromInputStream(inputStream, null, executor);

        CompletableFuture<PutObjectResponse> responseFuture =
                s3AsyncClientMp.putObject(r -> r.bucket(bucketName).key(key), body);

        PutObjectResponse response = responseFuture.join(); // Wait for the response.
        logger.info("Object {} uploaded to bucket {}.", key, bucketName);

        executor.shutdown();
        return response;
    }
}
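
The example above enables multipart uploads with the builder defaults. As an optional variation, not part of the published example, the builder also accepts a MultipartConfiguration that controls when the client switches to a multipart upload and how large each part is. The following is a minimal sketch; the byte sizes are illustrative assumptions.

import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.services.s3.multipart.MultipartConfiguration;

public class MultipartClientConfigSketch {
    public static void main(String[] args) {
        // A minimal sketch: in addition to multipartEnabled(true), supply a
        // MultipartConfiguration. The thresholds below are illustrative assumptions.
        S3AsyncClient s3AsyncClientMp = S3AsyncClient.builder()
                .multipartEnabled(true)
                .multipartConfiguration(MultipartConfiguration.builder()
                        .thresholdInBytes(16L * 1024 * 1024)      // Use multipart above 16 MiB.
                        .minimumPartSizeInBytes(8L * 1024 * 1024) // 8 MiB parts.
                        .build())
                .build();

        // Pass s3AsyncClientMp to putObjectFromStreamMp(...) exactly as in the example above.
        s3AsyncClientMp.close();
    }
}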

Use the Amazon S3 Transfer Manager.

import com.example.s3.util.AsyncExampleUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import software.amazon.awssdk.core.async.AsyncRequestBody;
import software.amazon.awssdk.core.exception.SdkException;
import software.amazon.awssdk.transfer.s3.S3TransferManager;
import software.amazon.awssdk.transfer.s3.model.CompletedUpload;
import software.amazon.awssdk.transfer.s3.model.Upload;

import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

public class UploadStream {
    private static final Logger logger = LoggerFactory.getLogger(UploadStream.class);

    public static void main(String[] args) {
        String bucketName = "amzn-s3-demo-bucket-" + UUID.randomUUID(); // Change bucket name.
        String key = UUID.randomUUID().toString();
        AsyncExampleUtils.createBucket(bucketName);
        try {
            UploadStream example = new UploadStream();
            CompletedUpload completedUpload = example.uploadStream(S3TransferManager.create(), bucketName, key);
            logger.info("Object {} etag: {}", key, completedUpload.response().eTag());
            logger.info("Object {} uploaded to bucket {}.", key, bucketName);
        } catch (SdkException e) {
            logger.error(e.getMessage(), e);
        } finally {
            AsyncExampleUtils.deleteObject(bucketName, key);
            AsyncExampleUtils.deleteBucket(bucketName);
        }
    }

    /**
     * @param transferManager - To upload content from a stream of unknown size, you can use the S3TransferManager based on the AWS CRT-based S3 client.
     * @param bucketName - The name of the bucket.
     * @param key - The name of the object.
     * @return - software.amazon.awssdk.transfer.s3.model.CompletedUpload - The result of the completed upload.
     */
    public CompletedUpload uploadStream(S3TransferManager transferManager, String bucketName, String key) {
        // AsyncExampleUtils.randomString() returns a random string up to 100 characters.
        String randomString = AsyncExampleUtils.randomString();
        logger.info("random string to upload: {}: length={}", randomString, randomString.length());
        InputStream inputStream = new ByteArrayInputStream(randomString.getBytes());

        // An executor is required so the InputStream is read on a separate thread and the upload is not blocked.
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // Specify `null` for the content length when you don't know the content length.
        AsyncRequestBody body = AsyncRequestBody.fromInputStream(inputStream, null, executor);

        Upload upload = transferManager.upload(builder -> builder
                .requestBody(body)
                .putObjectRequest(req -> req.bucket(bucketName).key(key))
                .build());

        CompletedUpload completedUpload = upload.completionFuture().join();
        executor.shutdown();
        return completedUpload;
    }
}
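
S3TransferManager.create() builds a transfer manager with default settings. If you want to supply your own underlying asynchronous client, the transfer manager builder accepts one. The following is a minimal sketch, not part of the published example; the CRT-based client shown here is an assumption about what you might want to tune.

import software.amazon.awssdk.services.s3.S3AsyncClient;
import software.amazon.awssdk.transfer.s3.S3TransferManager;

public class TransferManagerConfigSketch {
    public static void main(String[] args) {
        // A minimal sketch: pass an explicitly created client to the transfer manager
        // instead of relying on S3TransferManager.create() defaults.
        S3AsyncClient s3CrtClient = S3AsyncClient.crtCreate();

        S3TransferManager transferManager = S3TransferManager.builder()
                .s3Client(s3CrtClient)
                .build();

        // Pass transferManager to uploadStream(...) exactly as in the example above.
        transferManager.close();
        s3CrtClient.close();
    }
}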
Swift
SDK for Swift
Note

There's more on GitHub. Find the complete example and learn how to set up and run in the AWS Code Examples Repository.

import ArgumentParser
import AWSClientRuntime
import AWSS3
import Foundation
import Smithy
import SmithyHTTPAPI
import SmithyStreams

/// Upload a file to the specified bucket.
///
/// - Parameters:
///   - bucket: The Amazon S3 bucket name to store the file into.
///   - key: The name (or path) of the file to upload to in the `bucket`.
///   - sourcePath: The pathname on the local filesystem of the file to
///     upload.
func uploadFile(sourcePath: String, bucket: String, key: String?) async throws {
    let fileURL: URL = URL(fileURLWithPath: sourcePath)
    let fileName: String

    // If no key was provided, use the last component of the filename.
    if key == nil {
        fileName = fileURL.lastPathComponent
    } else {
        fileName = key!
    }

    let s3Client = try await S3Client()

    // Create a FileHandle for the source file.
    let fileHandle = FileHandle(forReadingAtPath: sourcePath)
    guard let fileHandle = fileHandle else {
        throw TransferError.readError
    }

    // Create a byte stream to retrieve the file's contents. This uses the
    // Smithy FileStream and ByteStream types.
    let stream = FileStream(fileHandle: fileHandle)
    let body = ByteStream.stream(stream)

    // Create a `PutObjectInput` with the ByteStream as the body of the
    // request's data. The AWS SDK for Swift will handle sending the
    // entire file in chunks, regardless of its size.
    let putInput = PutObjectInput(
        body: body,
        bucket: bucket,
        key: fileName
    )

    do {
        _ = try await s3Client.putObject(input: putInput)
    } catch {
        throw TransferError.uploadError("Error uploading the file: \(error)")
    }

    print("File \(fileURL.path) uploaded to bucket \(bucket).")
}

For a complete list of AWS SDK developer guides and code examples, see Developing with Amazon S3 using the AWS SDKs. This topic also includes information about getting started and details about previous SDK versions.