diff --git a/.doc_gen/metadata/s3_metadata.yaml b/.doc_gen/metadata/s3_metadata.yaml index b3e0532a61e..690b616c405 100644 --- a/.doc_gen/metadata/s3_metadata.yaml +++ b/.doc_gen/metadata/s3_metadata.yaml @@ -2952,6 +2952,14 @@ s3_Scenario_PresignedUrl: - snippet_tags: - s3.php.presigned_url.complete - php.example_code.s3.service.S3Service + SAP ABAP: + versions: + - sdk_version: 1 + github: sap-abap/services/s3 + excerpts: + - description: Create presigned requests to GET S3 objects. + snippet_tags: + - s3.abapv1.s3_presigned_url_get services: s3: {} s3_Scenario_ObjectVersioningUsage: diff --git a/cpp/example_code/s3/put_object_async.cpp b/cpp/example_code/s3/put_object_async.cpp index b671d459a4c..3a4e5b0e773 100644 --- a/cpp/example_code/s3/put_object_async.cpp +++ b/cpp/example_code/s3/put_object_async.cpp @@ -47,15 +47,15 @@ std::condition_variable AwsDoc::S3::upload_variable; */ // snippet-start:[s3.cpp.put_object_async_finished.code] -void putObjectAsyncFinished(const Aws::S3::S3Client *s3Client, +void uploadFileAsyncFinished(const Aws::S3::S3Client *s3Client, const Aws::S3::Model::PutObjectRequest &request, const Aws::S3::Model::PutObjectOutcome &outcome, const std::shared_ptr &context) { if (outcome.IsSuccess()) { - std::cout << "Success: putObjectAsyncFinished: Finished uploading '" + std::cout << "Success: uploadFileAsyncFinished: Finished uploading '" << context->GetUUID() << "'." << std::endl; } else { - std::cerr << "Error: putObjectAsyncFinished: " << + std::cerr << "Error: uploadFileAsyncFinished: " << outcome.GetError().GetMessage() << std::endl; } @@ -68,17 +68,17 @@ void putObjectAsyncFinished(const Aws::S3::S3Client *s3Client, //! Routine which demonstrates adding an object to an Amazon S3 bucket, asynchronously. /*! \param s3Client: Instance of the S3 Client. + \param request: Instance of the put object request. \param bucketName: Name of the bucket. \param fileName: Name of the file to put in the bucket. \return bool: Function succeeded. 
*/ // snippet-start:[s3.cpp.put_object_async.code] -bool AwsDoc::S3::putObjectAsync(const Aws::S3::S3Client &s3Client, +bool AwsDoc::S3::uploadFileAsync(const Aws::S3::S3Client &s3Client, + Aws::S3::Model::PutObjectRequest &request, const Aws::String &bucketName, const Aws::String &fileName) { - // Create and configure the asynchronous put object request. - Aws::S3::Model::PutObjectRequest request; request.SetBucket(bucketName); request.SetKey(fileName); @@ -100,9 +100,9 @@ bool AwsDoc::S3::putObjectAsync(const Aws::S3::S3Client &s3Client, context->SetUUID(fileName); // Make the asynchronous put object call. Queue the request into a - // thread executor and call the putObjectAsyncFinished function when the + // thread executor and call the uploadFileAsyncFinished function when the // operation has finished. - s3Client.PutObjectAsync(request, putObjectAsyncFinished, context); + s3Client.PutObjectAsync(request, uploadFileAsyncFinished, context); return true; } @@ -135,7 +135,7 @@ int main(int argc, char* argv[]) return 1; } - Aws::SDKOptions options; + const Aws::SDKOptions options; Aws::InitAPI(options); { const Aws::String fileName = argv[1]; @@ -150,13 +150,18 @@ int main(int argc, char* argv[]) // Create and configure the Amazon S3 client. // This client must be declared here, as this client must exist // until the put object operation finishes. - Aws::S3::S3ClientConfiguration config; + const Aws::S3::S3ClientConfiguration config; // Optional: Set to the AWS Region in which the bucket was created (overrides config file). // config.region = "us-east-1"; - Aws::S3::S3Client s3Client(config); + const Aws::S3::S3Client s3Client(config); - AwsDoc::S3::putObjectAsync(s3Client, bucketName, fileName); + // Create the request object. + // This request object must be declared here, because the object must exist + // until the put object operation finishes. 
+ Aws::S3::Model::PutObjectRequest request; + + AwsDoc::S3::uploadFileAsync(s3Client, request, bucketName, fileName); std::cout << "main: Waiting for file upload attempt..." << std::endl << std::endl; diff --git a/cpp/example_code/s3/s3_examples.h b/cpp/example_code/s3/s3_examples.h index 2da5777698f..245812764f9 100644 --- a/cpp/example_code/s3/s3_examples.h +++ b/cpp/example_code/s3/s3_examples.h @@ -94,7 +94,8 @@ namespace AwsDoc { const Aws::String &granteeID, const Aws::String &granteeEmailAddress, const Aws::String &granteeURI, const Aws::S3::S3ClientConfiguration &clientConfig); - bool putObjectAsync(const Aws::S3::S3Client &s3Client, + bool uploadFileAsync(const Aws::S3::S3Client &s3Client, + Aws::S3::Model::PutObjectRequest &request, const Aws::String &bucketName, const Aws::String &fileName); diff --git a/cpp/example_code/s3/tests/gtest_put_object_async.cpp b/cpp/example_code/s3/tests/gtest_put_object_async.cpp index 203d0a25700..739d19c9db6 100644 --- a/cpp/example_code/s3/tests/gtest_put_object_async.cpp +++ b/cpp/example_code/s3/tests/gtest_put_object_async.cpp @@ -12,6 +12,7 @@ #include #include #include +#include #include "../s3_examples.h" #include "S3_GTests.h" @@ -28,8 +29,9 @@ namespace AwsDocTest { { Aws::S3::S3Client client(*s_clientConfig); + Aws::S3::Model::PutObjectRequest request; std::unique_lock lock(AwsDoc::S3::upload_mutex); - bool result = AwsDoc::S3::putObjectAsync(client, bucketNames[0], testFile); + bool result = AwsDoc::S3::uploadFileAsync(client, request, bucketNames[0], testFile); AwsDoc::S3::upload_variable.wait(lock); diff --git a/php/example_code/class_examples/CommandPool.php b/php/example_code/class_examples/CommandPool.php index d3aa06993d0..36feac6a10b 100644 --- a/php/example_code/class_examples/CommandPool.php +++ b/php/example_code/class_examples/CommandPool.php @@ -37,7 +37,7 @@ $s3Service = new S3Service($client, true); -$bucket = 'my-bucket-' . uniqid(); // This bucket will be deleted at the end of this example. 
+$bucket = 'amzn-s3-demo-bucket-' . uniqid(); // This bucket will be deleted at the end of this example. $client->createBucket([ "Bucket" => $bucket, diff --git a/php/example_code/cloudfront/CreateDistributionS3.php b/php/example_code/cloudfront/CreateDistributionS3.php index 7cdae2b0856..edc47482a15 100644 --- a/php/example_code/cloudfront/CreateDistributionS3.php +++ b/php/example_code/cloudfront/CreateDistributionS3.php @@ -50,7 +50,7 @@ function createS3Distribution($cloudFrontClient, $distribution) function createsTheS3Distribution() { $originName = 'my-unique-origin-name'; - $s3BucketURL = 'my-bucket-name.s3.amazonaws.com'; + $s3BucketURL = 'amzn-s3-demo-bucket.s3.amazonaws.com'; $callerReference = 'my-unique-caller-reference'; $comment = 'my-comment-about-this-distribution'; $defaultCacheBehavior = [ diff --git a/php/example_code/cloudfront/old_tests/CreateDistributionS3Test.php b/php/example_code/cloudfront/old_tests/CreateDistributionS3Test.php index 2a65bf09728..8cf9991a45c 100644 --- a/php/example_code/cloudfront/old_tests/CreateDistributionS3Test.php +++ b/php/example_code/cloudfront/old_tests/CreateDistributionS3Test.php @@ -24,7 +24,7 @@ public function testCreatesAnS3Distribution() require(__DIR__ . 
'/../CreateDistributionS3.php'); $originName = 'my-unique-origin-name'; - $s3BucketURL = 'my-bucket-name.s3.amazonaws.com'; + $s3BucketURL = 'amzn-s3-demo-bucket.s3.amazonaws.com'; $callerReference = 'my-unique-caller-reference'; $comment = 'my-comment-about-this-distribution'; $defaultCacheBehavior = [ diff --git a/php/example_code/cloudfront/old_tests/DisableDistributionS3Test.php b/php/example_code/cloudfront/old_tests/DisableDistributionS3Test.php index 4f33c89bdfe..6171756575a 100644 --- a/php/example_code/cloudfront/old_tests/DisableDistributionS3Test.php +++ b/php/example_code/cloudfront/old_tests/DisableDistributionS3Test.php @@ -82,7 +82,7 @@ protected function setUp(): void 'Origins' => [ 'Items' => [ [ - 'DomainName' => 'my-bucket-name.s3.amazonaws.com', + 'DomainName' => 'amzn-s3-demo-bucket.s3.amazonaws.com', 'Id' => 'my-unique-origin-name', 'OriginPath' => '', 'CustomHeaders' => [ diff --git a/php/example_code/cloudfront/old_tests/UpdateDistributionS3Test.php b/php/example_code/cloudfront/old_tests/UpdateDistributionS3Test.php index addaac43567..c6e0527f8fb 100644 --- a/php/example_code/cloudfront/old_tests/UpdateDistributionS3Test.php +++ b/php/example_code/cloudfront/old_tests/UpdateDistributionS3Test.php @@ -82,7 +82,7 @@ protected function setUp(): void 'Origins' => [ 'Items' => [ [ - 'DomainName' => 'my-bucket-name.s3.amazonaws.com', + 'DomainName' => 'amzn-s3-demo-bucket.s3.amazonaws.com', 'Id' => 'my-unique-origin-name', 'OriginPath' => '', 'CustomHeaders' => [ diff --git a/php/example_code/cloudwatch/DescribeAlarmsForMetric.php b/php/example_code/cloudwatch/DescribeAlarmsForMetric.php index 8e7640aa74e..859d9f5c174 100644 --- a/php/example_code/cloudwatch/DescribeAlarmsForMetric.php +++ b/php/example_code/cloudwatch/DescribeAlarmsForMetric.php @@ -79,7 +79,7 @@ function describeTheAlarmsForMetric() ], [ 'Name' => 'BucketName', - 'Value' => 'my-bucket' + 'Value' => 'amzn-s3-demo-bucket' ] ]; diff --git 
a/php/example_code/cloudwatch/GetMetricStatistics.php b/php/example_code/cloudwatch/GetMetricStatistics.php index 80721d05f4d..8571f212332 100644 --- a/php/example_code/cloudwatch/GetMetricStatistics.php +++ b/php/example_code/cloudwatch/GetMetricStatistics.php @@ -147,7 +147,7 @@ function getTheMetricStatistics() ], [ 'Name' => 'BucketName', - 'Value' => 'my-bucket' + 'Value' => 'amzn-s3-demo-bucket' ] ]; $startTime = strtotime('-3 days'); diff --git a/php/example_code/cloudwatch/old_tests/DescribeAlarmsForMetricTest.php b/php/example_code/cloudwatch/old_tests/DescribeAlarmsForMetricTest.php index a9cf3445a5b..92bc92e72fa 100644 --- a/php/example_code/cloudwatch/old_tests/DescribeAlarmsForMetricTest.php +++ b/php/example_code/cloudwatch/old_tests/DescribeAlarmsForMetricTest.php @@ -32,7 +32,7 @@ public function testDescribesAlarmsForAMetric() ], [ 'Name' => 'BucketName', - 'Value' => 'my-bucket' + 'Value' => 'amzn-s3-demo-bucket' ] ]; diff --git a/php/example_code/lambda/GettingStartedWithLambda.php b/php/example_code/lambda/GettingStartedWithLambda.php index bcd965466e2..3b1d4760d31 100644 --- a/php/example_code/lambda/GettingStartedWithLambda.php +++ b/php/example_code/lambda/GettingStartedWithLambda.php @@ -53,7 +53,7 @@ public function run() echo "Attached the AWSLambdaBasicExecutionRole to {$role['RoleName']}.\n"; echo "\nNow let's create an S3 bucket and upload our Lambda code there.\n"; - $bucketName = "test-example-bucket-$uniqid"; + $bucketName = "amzn-s3-demo-bucket-$uniqid"; $s3client->createBucket([ 'Bucket' => $bucketName, ]); diff --git a/php/example_code/lambda/tests/LambdaTest.php b/php/example_code/lambda/tests/LambdaTest.php index 42349d34bf0..80396b5e520 100644 --- a/php/example_code/lambda/tests/LambdaTest.php +++ b/php/example_code/lambda/tests/LambdaTest.php @@ -53,7 +53,7 @@ public function testSingleActionCalls() } ] }"; - $bucketName = "test-example-bucket-$uniqid"; + $bucketName = "amzn-s3-demo-bucket-$uniqid"; 
$this->s3client->createBucket([ 'Bucket' => $bucketName, ]); diff --git a/php/example_code/s3/CreateBucket.php b/php/example_code/s3/CreateBucket.php index 4a88c74eb85..6a8fd8816bb 100644 --- a/php/example_code/s3/CreateBucket.php +++ b/php/example_code/s3/CreateBucket.php @@ -46,7 +46,7 @@ function createTheBucket() 'version' => '2006-03-01' ]); - echo createBucket($s3Client, 'my-bucket'); + echo createBucket($s3Client, 'amzn-s3-demo-bucket'); } // Uncomment the following line to run this code in an AWS account. diff --git a/php/example_code/s3/ErrorHandling.php b/php/example_code/s3/ErrorHandling.php index 473206424dc..70c22cc3774 100644 --- a/php/example_code/s3/ErrorHandling.php +++ b/php/example_code/s3/ErrorHandling.php @@ -33,7 +33,7 @@ $s3Client = $sdk->createS3(); try { - $s3Client->createBucket(['Bucket' => 'my-bucket']); + $s3Client->createBucket(['Bucket' => 'amzn-s3-demo-bucket']); } catch (S3Exception $e) { // Catch an S3 specific exception. echo $e->getMessage(); @@ -53,7 +53,7 @@ // snippet-start:[s3.php.error_handling.async] //Asynchronous Error Handling // snippet-start:[s3.php.error_handling.promise] -$promise = $s3Client->createBucketAsync(['Bucket' => 'my-bucket']); +$promise = $s3Client->createBucketAsync(['Bucket' => 'amzn-s3-demo-bucket']); // snippet-end:[s3.php.error_handling.promise] $promise->otherwise(function ($reason) { var_dump($reason); diff --git a/php/example_code/s3/PutObjectServiceOperations.php b/php/example_code/s3/PutObjectServiceOperations.php index 0afd035e918..49319666234 100644 --- a/php/example_code/s3/PutObjectServiceOperations.php +++ b/php/example_code/s3/PutObjectServiceOperations.php @@ -36,14 +36,14 @@ // Send a PutObject request and get the result object. $result = $s3Client->putObject([ - 'Bucket' => 'my-bucket', + 'Bucket' => 'amzn-s3-demo-bucket', 'Key' => 'my-key', 'Body' => 'this is the body!' ]); // Download the contents of the object. 
$result = $s3Client->getObject([ - 'Bucket' => 'my-bucket', + 'Bucket' => 'amzn-s3-demo-bucket', 'Key' => 'my-key' ]); diff --git a/php/example_code/s3/s3BucketAcl.php b/php/example_code/s3/s3BucketAcl.php index 539cdd8ff04..cd8f4e4100b 100644 --- a/php/example_code/s3/s3BucketAcl.php +++ b/php/example_code/s3/s3BucketAcl.php @@ -26,7 +26,7 @@ ]); // Gets the access control policy for a bucket -$bucket = 'my-s3-bucket'; +$bucket = 'amzn-s3-demo-bucket'; try { $resp = $s3Client->getBucketAcl([ 'Bucket' => $bucket diff --git a/php/example_code/s3/s3BucketPolicy.php b/php/example_code/s3/s3BucketPolicy.php index 4124cb55ca4..10e81cf7814 100644 --- a/php/example_code/s3/s3BucketPolicy.php +++ b/php/example_code/s3/s3BucketPolicy.php @@ -25,7 +25,7 @@ 'version' => '2006-03-01' ]); -$bucket = 'my-s3-bucket'; +$bucket = 'amzn-s3-demo-bucket'; // Get the policy of a specific bucket try { diff --git a/php/example_code/s3/s3ObjectAcl.php b/php/example_code/s3/s3ObjectAcl.php index 76f62bbbdb0..8f25fc3d2ab 100644 --- a/php/example_code/s3/s3ObjectAcl.php +++ b/php/example_code/s3/s3ObjectAcl.php @@ -19,7 +19,7 @@ ]); // Gets the access control list (ACL) of an object. -$bucket = 'my-s3-bucket'; +$bucket = 'amzn-s3-demo-bucket'; $key = 'my-object'; try { $resp = $s3Client->getObjectAcl([ diff --git a/php/example_code/s3/s3WebHost.php b/php/example_code/s3/s3WebHost.php index 28aa3581891..667e8765d88 100644 --- a/php/example_code/s3/s3WebHost.php +++ b/php/example_code/s3/s3WebHost.php @@ -26,7 +26,7 @@ ]); // Retrieving the Bucket Website Configuration -$bucket = 'my-s3-bucket'; +$bucket = 'amzn-s3-demo-bucket'; try { $resp = $s3Client->getBucketWebsite([ 'Bucket' => $bucket diff --git a/sap-abap/services/s3/README.md b/sap-abap/services/s3/README.md index d8454a2bec8..bb0fd168e11 100644 --- a/sap-abap/services/s3/README.md +++ b/sap-abap/services/s3/README.md @@ -48,6 +48,13 @@ Code excerpts that show you how to call individual service functions. 
- [ListObjectsV2](zcl_aws1_s3_actions.clas.abap#L197) - [PutObject](zcl_aws1_s3_actions.clas.abap#L216) +### Scenarios + +Code examples that show you how to accomplish a specific task by calling multiple +functions within the same service. + +- [Create a presigned URL](zcl_aws1_s3_scenario.clas.abap) + @@ -79,6 +86,18 @@ This example shows you how to do the following: +#### Create a presigned URL + +This example shows you how to create a presigned URL for Amazon S3 and upload an object. + + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. diff --git a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.abap b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.abap index 0857d599654..1921a5dc280 100644 --- a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.abap +++ b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.abap @@ -10,11 +10,25 @@ CLASS zcl_aws1_s3_scenario DEFINITION METHODS getting_started_with_s3 IMPORTING - !iv_bucket_name TYPE /aws1/s3_bucketname - !iv_key TYPE /aws1/s3_objectkey - !iv_copy_to_folder TYPE /aws1/s3_bucketname + !iv_bucket_name TYPE /aws1/s3_bucketname + !iv_key TYPE /aws1/s3_objectkey + !iv_copy_to_folder TYPE /aws1/s3_bucketname EXPORTING - !oo_result TYPE REF TO /aws1/cl_knsputrecordoutput . + !oo_result TYPE REF TO /aws1/cl_knsputrecordoutput + RAISING + /aws1/cx_rt_service_generic + /aws1/cx_rt_technical_generic + /aws1/cx_rt_no_auth_generic . + METHODS presigner_get + IMPORTING + !iv_bucket_name TYPE /aws1/s3_bucketname + !iv_key TYPE /aws1/s3_objectkey + RETURNING + VALUE(ov_url) TYPE string + RAISING + /aws1/cx_rt_service_generic + /aws1/cx_rt_technical_generic + /aws1/cx_rt_no_auth_generic . PROTECTED SECTION. PRIVATE SECTION. ENDCLASS. @@ -140,4 +154,44 @@ CLASS ZCL_AWS1_S3_SCENARIO IMPLEMENTATION. "snippet-end:[s3.abapv1.getting_started_with_s3] ENDMETHOD. + + + METHOD presigner_get. + CONSTANTS cv_pfl TYPE /aws1/rt_profile_id VALUE 'ZCODE_DEMO'. 
+ + "snippet-start:[s3.abapv1.s3_presigned_url_get] + " iv_bucket_name is the bucket name + " iv_key is the object name like "myfile.txt" + + DATA(lo_session) = /aws1/cl_rt_session_aws=>create( cv_pfl ). + DATA(lo_s3) = /aws1/cl_s3_factory=>create( lo_session ). + + "Upload a nice Hello World file to an S3 bucket." + TRY. + DATA(lv_contents) = cl_abap_codepage=>convert_to( 'Hello, World' ). + lo_s3->putobject( + iv_bucket = iv_bucket_name + iv_key = iv_key + iv_body = lv_contents + iv_contenttype = 'text/plain' ). + MESSAGE 'Object uploaded to S3 bucket.' TYPE 'I'. + CATCH /aws1/cx_s3_nosuchbucket. + MESSAGE 'Bucket does not exist.' TYPE 'E'. + ENDTRY. + + " now generate a presigned URL with a 600-second expiration + DATA(lo_presigner) = lo_s3->get_presigner( iv_expires_sec = 600 ). + " the presigner getobject() method has the same signature as + " lo_s3->getobject(), but it doesn't actually make the call + " to the service. It just prepares a presigned URL for a future call + DATA(lo_presigned_req) = lo_presigner->getobject( + iv_bucket = iv_bucket_name + iv_key = iv_key ). + + " You can provide this URL to a web page, user, email etc so they + " can retrieve the file. The URL will expire in 10 minutes. + ov_url = lo_presigned_req->get_url( ). + "snippet-end:[s3.abapv1.s3_presigned_url_get] + + ENDMETHOD. ENDCLASS. diff --git a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.testclasses.abap b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.testclasses.abap index a2f5cb9fc97..5ffd5c8e5f8 100644 --- a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.testclasses.abap +++ b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.testclasses.abap @@ -18,7 +18,7 @@ CLASS ltc_zcl_aws1_s3_scenario DEFINITION FOR TESTING DURATION SHORT RISK LEVEL DATA ao_s3_scenario TYPE REF TO zcl_aws1_s3_scenario. METHODS getting_started_scenario FOR TESTING RAISING /aws1/cx_rt_generic. - + METHODS presigner_get_scenario FOR TESTING RAISING /aws1/cx_rt_generic cx_uuid_error. 
METHODS setup RAISING /aws1/cx_rt_generic zcx_aws1_ex_generic. METHODS teardown RAISING /aws1/cx_rt_generic zcx_aws1_ex_generic. @@ -75,4 +75,39 @@ CLASS ltc_zcl_aws1_s3_scenario IMPLEMENTATION. act = lv_found msg = |Bucket { av_bucket } should have been deleted| ). ENDMETHOD. + + METHOD presigner_get_scenario. + " we don't show the customer the bucket creation in this scenario. + " So we'll create a separate bucket just for this scenario + DATA(lo_session) = /aws1/cl_rt_session_aws=>create( cv_pfl ). + DATA(lo_s3) = /aws1/cl_s3_factory=>create( lo_session ). + + DATA(lv_region) = CONV /aws1/s3_bucketlocationcnstrnt( lo_session->get_region( ) ). + DATA lo_constraint TYPE REF TO /aws1/cl_s3_createbucketconf. + IF lv_region = 'us-east-1'. + CLEAR lo_constraint. + ELSE. + lo_constraint = NEW /aws1/cl_s3_createbucketconf( lv_region ). + ENDIF. + + DATA(lv_uuid) = cl_system_uuid=>if_system_uuid_static~create_uuid_c32( ). + TRANSLATE lv_uuid TO LOWER CASE. + DATA(lv_bucket_name) = |sap-abap-s3-scenario-presigner-{ lv_uuid }|. + + lo_s3->createbucket( + iv_bucket = lv_bucket_name + io_createbucketconfiguration = lo_constraint ). + + + DATA(lv_url) = ao_s3_scenario->presigner_get( + iv_bucket_name = lv_bucket_name + iv_key = cv_file ). + ASSERT lv_url IS NOT INITIAL. + + " cleanup + lo_s3->deleteobject( iv_bucket = lv_bucket_name iv_key = cv_file ). + lo_s3->deletebucket( iv_bucket = lv_bucket_name ). + + ENDMETHOD. + ENDCLASS. diff --git a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.xml b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.xml index 97af3930292..6bae44f2d78 100644 --- a/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.xml +++ b/sap-abap/services/s3/zcl_aws1_s3_scenario.clas.xml @@ -18,7 +18,56 @@ E Getting started with Amazon S3 buckets and objects. + + PRESIGNER_GET + E + Create a presigned URL for an S3 object. 
+ + + + GETTING_STARTED_WITH_S3 + /AWS1/CX_RT_NO_AUTH_GENERIC + E + Generic lack of authorization + + + GETTING_STARTED_WITH_S3 + /AWS1/CX_RT_SERVICE_GENERIC + E + Generic Service call error + + + GETTING_STARTED_WITH_S3 + /AWS1/CX_RT_TECHNICAL_GENERIC + E + Technical errors + + + PRESIGNER_GET + /AWS1/CX_RT_NO_AUTH_GENERIC + E + Generic lack of authorization + + + PRESIGNER_GET + /AWS1/CX_RT_SERVICE_GENERIC + E + Generic Service call error + + + PRESIGNER_GET + /AWS1/CX_RT_TECHNICAL_GENERIC + E + Technical errors + + + PRESIGNER_GET + IV_KEY + E + Object Key + + diff --git a/swift/example_code/lambda/using-lambda-runtime/Package.swift b/swift/example_code/lambda/using-lambda-runtime/Package.swift index ba571c455fa..c3062680ec6 100644 --- a/swift/example_code/lambda/using-lambda-runtime/Package.swift +++ b/swift/example_code/lambda/using-lambda-runtime/Package.swift @@ -1,4 +1,4 @@ -// swift-tools-version: 5.10 +// swift-tools-version: 6.0 // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 @@ -20,7 +20,7 @@ let package = Package( dependencies: [ .package( url: "https://github.com/swift-server/swift-aws-lambda-runtime.git", - from: "1.0.0-alpha"), + from: "2.0.0-beta.1"), .package(url: "https://github.com/awslabs/aws-sdk-swift.git", from: "1.0.0"), ], diff --git a/swift/example_code/lambda/using-lambda-runtime/Sources/lambda.swift b/swift/example_code/lambda/using-lambda-runtime/Sources/lambda.swift index 98087226074..4ee32315352 100644 --- a/swift/example_code/lambda/using-lambda-runtime/Sources/lambda.swift +++ b/swift/example_code/lambda/using-lambda-runtime/Sources/lambda.swift @@ -4,7 +4,7 @@ // snippet-start:[lambda.swift.function.imports] import Foundation import AWSLambdaRuntime -import AWSS3 +@preconcurrency import AWSS3 import protocol AWSClientRuntime.AWSServiceError import enum Smithy.ByteStream @@ -38,125 +38,85 @@ enum S3ExampleLambdaErrors: Error { /// A required environment variable is missing. The missing variable is /// specified. case noEnvironmentVariable(String) - /// The Amazon Simple Storage Service (S3) client couldn't be created. - case noS3Client } // snippet-end:[lambda.swift.function.errors] // snippet-end:[lambda.swift.function.types] -// snippet-start:[lambda.swift.function.handler] -/// A Swift AWS Lambda Runtime `LambdaHandler` lets you both perform needed -/// initialization and handle AWS Lambda requests. There are other handler -/// protocols available for other use cases. -@main -struct S3ExampleLambda: LambdaHandler { - let s3Client: S3Client? - - // snippet-start:[lambda.swift.function.handler.init] - /// Initialize the AWS Lambda runtime. - /// - /// ^ The logger is a standard Swift logger. You can control the verbosity - /// by setting the `LOG_LEVEL` environment variable. - init(context: LambdaInitializationContext) async throws { - // Display the `LOG_LEVEL` configuration for this process. 
- context.logger.info( - "Log Level env var : \(ProcessInfo.processInfo.environment["LOG_LEVEL"] ?? "info" )" - ) - - // Initialize the Amazon S3 client. This single client is used for every - // request. - let currentRegion = ProcessInfo.processInfo.environment["AWS_REGION"] ?? "us-east-1" - self.s3Client = try? S3Client(region: currentRegion) - } - // snippet-end:[lambda.swift.function.handler.init] - - // snippet-start:[lambda.swift.function.handler.putobject] - /// Write the specified text into a given Amazon S3 bucket. The object's - /// name is based on the current time. - /// - /// - Parameters: - /// - s3Client: The `S3Client` to use when sending the object to the - /// bucket. - /// - bucketName: The name of the Amazon S3 bucket to put the object - /// into. - /// - body: The string to write into the new object. - /// - /// - Returns: A string indicating the name of the file created in the AWS - /// S3 bucket. - private func putObject(client: S3Client, - bucketName: String, - body: String) async throws -> String { - // Generate an almost certainly unique object name based on the current - // timestamp. - let objectName = "\(Int(Date().timeIntervalSince1970*1_000_000)).txt" - - // Create a Smithy `ByteStream` that represents the string to write into - // the bucket. - let inputStream = Smithy.ByteStream.data(body.data(using: .utf8)) - - // Store the text into an object in the Amazon S3 bucket. - let putObjectRequest = PutObjectInput( +let currentRegion = ProcessInfo.processInfo.environment["AWS_REGION"] ?? "us-east-1" +let s3Client = try S3Client(region: currentRegion) + +// snippet-start:[lambda.swift.function.putobject] +/// Create a new object on Amazon S3 whose name is based on the current +/// timestamp, containing the text specified. +/// +/// - Parameters: +/// - body: The text to store in the new S3 object. +/// - bucketName: The name of the Amazon S3 bucket to put the new object +/// into. +/// +/// - Throws: Errors from `PutObject`. 
+/// +/// - Returns: The name of the new Amazon S3 object that contains the +/// specified body text. +func putObject(body: String, bucketName: String) async throws -> String { + // Generate an almost certainly unique object name based on the current + // timestamp. + + let objectName = "\(Int(Date().timeIntervalSince1970*1_000_000)).txt" + + // Create a Smithy `ByteStream` that represents the string to write into + // the bucket. + + let inputStream = Smithy.ByteStream.data(body.data(using: .utf8)) + + // Store the text into an object in the Amazon S3 bucket. + + _ = try await s3Client.putObject( + input: PutObjectInput( body: inputStream, bucket: bucketName, key: objectName ) - let _ = try await client.putObject(input: putObjectRequest) + ) + + // Return the name of the file + + return objectName +} +// snippet-end:[lambda.swift.function.putobject] + +// snippet-start:[lambda.swift.function.runtime] +let runtime = LambdaRuntime { + (event: Request, context: LambdaContext) async throws -> Response in - // Return the name of the file. - return objectName + var responseMessage: String + + // Get the name of the bucket to write the new object into from the + // environment variable `BUCKET_NAME`. + guard let bucketName = ProcessInfo.processInfo.environment["BUCKET_NAME"] else { + context.logger.error("Set the environment variable BUCKET_NAME to the name of the S3 bucket to write files to.") + throw S3ExampleLambdaErrors.noEnvironmentVariable("BUCKET_NAME") } - // snippet-end:[lambda.swift.function.handler.putobject] - - // snippet-start:[lambda.swift.function.handler.handle] - /// The Lambda function's entry point. Called by the Lambda runtime. - /// - /// - Parameters: - /// - event: The `Request` describing the request made by the - /// client. - /// - context: A `LambdaContext` describing the context in - /// which the lambda function is running. 
- /// - /// - Returns: A `Response` object that will be encoded to JSON and sent - /// to the client by the Lambda runtime. - func handle(_ event: Request, context: LambdaContext) async throws -> Response { - // Get the bucket name from the environment. - guard let bucketName = ProcessInfo.processInfo.environment["BUCKET_NAME"] else { - throw S3ExampleLambdaErrors.noEnvironmentVariable("BUCKET_NAME") - } - - // Make sure the `S3Client` is valid. - guard let s3Client else { - throw S3ExampleLambdaErrors.noS3Client - } - - // Call the `putObject` function to store the object on Amazon S3. - var responseMessage: String - do { - let filename = try await putObject( - client: s3Client, - bucketName: bucketName, - body: event.body) - - // Generate the response text. - responseMessage = "The Lambda function has successfully stored your data in S3 with name \(filename)'" - - // Send the success notification to the logger. - context.logger.info("Data successfully stored in S3.") - } catch let error as AWSServiceError { - // Generate the error message. - responseMessage = "The Lambda function encountered an error and your data was not saved. Root cause: \(error.errorCode ?? "") - \(error.message ?? "")" - - // Send the error message to the logger. - context.logger.error("Failed to upload data to Amazon S3.") - } - - // Return the response message. The AWS Lambda runtime will send it to the - // client. - return Response( - req_id: context.requestID, - body: responseMessage) + + do { + let filename = try await putObject(body: event.body, bucketName: bucketName) + + // Generate the response text and update the log. + responseMessage = "The Lambda function has successfully stored your data in S3 with name '\(filename)'" + context.logger.info("Data successfully stored in S3.") + } catch let error as AWSServiceError { + // Generate the error message and update the log. + responseMessage = "The Lambda function encountered an error and your data was not saved. 
Root cause: \(error.errorCode ?? "") - \(error.message ?? "")" + context.logger.error("Failed to upload data to Amazon S3.") } - // snippet-end:[lambda.swift.function.handler.handle] + + return Response(req_id: context.requestID, body: responseMessage) } -// snippet-end:[lambda.swift.function.handler] +// snippet-end:[lambda.swift.function.runtime] + +// Start up the runtime. + +// snippet-start:[lambda.swift.function.start] +try await runtime.run() +// snippet-end:[lambda.swift.function.start] // snippet-end:[lambda.swift.function.complete]