diff --git a/.doc_gen/metadata/batch_metadata.yaml b/.doc_gen/metadata/batch_metadata.yaml index e50b4b1594b..3d3c07b1ddc 100644 --- a/.doc_gen/metadata/batch_metadata.yaml +++ b/.doc_gen/metadata/batch_metadata.yaml @@ -174,8 +174,6 @@ batch_CreateComputeEnvironment: services: batch: {CreateComputeEnvironment} batch_Scenario: - title: Learn core operations for'&BATCHlong; using an &AWS; SDK - title_abbrev: Learn &BATCH; core operations synopsis_list: - Create an &BATCH; compute environment. - Check the status of the compute environment. @@ -185,7 +183,7 @@ batch_Scenario: - Get a list of jobs applicable to the job queue. - Check the status of job. - Delete &BATCH; resources. - category: Scenarios + category: Basics languages: Java: versions: diff --git a/.doc_gen/metadata/bedrock-agent-runtime_metadata.yaml b/.doc_gen/metadata/bedrock-agent-runtime_metadata.yaml index 9d2e42ce39b..d667c22cb56 100644 --- a/.doc_gen/metadata/bedrock-agent-runtime_metadata.yaml +++ b/.doc_gen/metadata/bedrock-agent-runtime_metadata.yaml @@ -30,5 +30,34 @@ bedrock-agent-runtime_InvokeFlow: - description: snippet_files: - javascriptv3/example_code/bedrock-agent-runtime/actions/invoke-flow.js + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-agent-runtime + excerpts: + - description: Invoke a flow. + snippet_tags: + - python.example_code.bedrock-agent-runtime.InvokeFlow + services: + bedrock-agent-runtime: {InvokeFlow} + +bedrock-agent-runtime_Scenario_ConverseWithFlow: + title: Converse with an &BRlong; flow + synopsis: use InvokeFlow to converse with an &BRlong; flow that includes an agent node. + category: Basics + guide_topic: + title: Converse with an &BRlong; flow + url: bedrock/latest/userguide/flows-multi-turn-invocation.html + languages: + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-agent-runtime + sdkguide: + excerpts: + - description: + snippet_tags: + - python.example_code.bedrock-agent-runtime.flow_conversation.complete + services: bedrock-agent-runtime: {InvokeFlow} diff --git a/.doc_gen/metadata/bedrock-runtime_metadata.yaml b/.doc_gen/metadata/bedrock-runtime_metadata.yaml index 5eeddc97e7f..36cb8c49cab 100644 --- a/.doc_gen/metadata/bedrock-runtime_metadata.yaml +++ b/.doc_gen/metadata/bedrock-runtime_metadata.yaml @@ -21,6 +21,20 @@ bedrock-runtime_Hello: - description: snippet_files: - javascriptv3/example_code/bedrock-runtime/hello.js + + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-runtime + sdkguide: + excerpts: + - description: Send a prompt to a model with the InvokeModel operation. + snippet_tags: + - bedrock-runtime.example_code.hello_bedrock_invoke.complete + - description: Send a user message to a model with the Converse operation. + snippet_tags: + - bedrock-runtime.example_code.hello_bedrock_converse.complete + services: bedrock-runtime: {InvokeModel} @@ -69,6 +83,54 @@ bedrock-runtime_Converse_Ai21LabsJurassic2: services: bedrock-runtime: {Converse} +bedrock-runtime_Converse_AmazonNovaText: + title: Invoke Amazon Nova on &BR; using Bedrock's Converse API + title_abbrev: "Converse" + synopsis: send a text message to Amazon Nova, using Bedrock's Converse API. + category: Amazon Nova + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova using Bedrock's Converse API with the async Java client. 
+ snippet_tags: + - bedrock-runtime.java2.ConverseAsync_AmazonNovaText + - description: Send a text message to Amazon Nova, using Bedrock's Converse API. + snippet_tags: + - bedrock-runtime.java2.Converse_AmazonNovaText + JavaScript: + versions: + - sdk_version: 3 + github: javascriptv3/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova, using Bedrock's Converse API. + snippet_tags: + - javascript.v3.bedrock-runtime.Converse_AmazonTitanText + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/Bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova, using Bedrock's Converse API. + snippet_tags: + - BedrockRuntime.dotnetv3.Converse_AmazonNovaText + - description: Send a conversation of messages to Amazon Nova using Bedrock's Converse API with a tool configuration. + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.SendConverseRequest + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova, using Bedrock's Converse API. + snippet_tags: + - python.example_code.bedrock-runtime.Converse_AmazonNovaText + services: + bedrock-runtime: {Converse} + bedrock-runtime_Converse_AmazonTitanText: title: Invoke Amazon Titan Text on &BR; using Bedrock's Converse API title_abbrev: "Converse" @@ -113,6 +175,60 @@ bedrock-runtime_Converse_AmazonTitanText: services: bedrock-runtime: {Converse} +bedrock-runtime_Scenario_ToolUse: + title: "A tool use example illustrating how to connect AI models on &BR; with a custom tool or API" + title_abbrev: "Tool use with the Converse API" + synopsis: "build a typical interaction between an application, a generative AI model, and connected tools or APIs to mediate interactions between the AI and the outside world. It uses the example of connecting an external weather API to the AI model so it can provide real-time weather information based on user input." + category: Scenarios + languages: + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario + excerpts: + - description: "The primary execution of the scenario flow. This scenario orchestrates the conversation between the user, the &BR; Converse API, and a weather tool." + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.Scenario + - description: "The weather tool used by the demo. This file defines the tool specification and implements the logic to retrieve weather data using from the Open-Meteo API." + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.WeatherTool + - description: "The Converse API action with a tool configuration." + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.SendConverseRequest + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-runtime + excerpts: + - description: "The primary execution script of the demo. This script orchestrates the conversation between the user, the &BR; Converse API, and a weather tool." + snippet_files: + - python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/tool_use_demo.py + - description: "The weather tool used by the demo. This script defines the tool specification and implements the logic to retrieve weather data using from the Open-Meteo API." 
+ snippet_files: + - python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/weather_tool.py + Rust: + versions: + - sdk_version: 1 + github: rustv1/examples/bedrock-runtime + excerpts: + - description: "The primary scenario and logic for the demo. This orchestrates the conversation between the user, the &BR; Converse API, and a weather tool." + snippet_tags: + - rust.bedrock-runtime.Converse_AnthropicClaude.tool-use + - description: "The weather tool used by the demo. This script defines the tool specification and implements the logic to retrieve weather data using from the Open-Meteo API." + snippet_tags: + - rust.bedrock-runtime.Converse_AnthropicClaude.tool-use.weather-tool + - description: "Utilities to print the Message Content Blocks." + snippet_tags: + - rust.bedrock-runtime.Converse_AnthropicClaude.tool-use.user-interface + - description: "Use statements, Error utility, and constants." + snippet_tags: + - rust.bedrock-runtime.Converse_AnthropicClaude.tool-use.supporting + services: + bedrock-runtime: {Converse} + bedrock-runtime_Converse_AnthropicClaude: title: Invoke Anthropic Claude on &BR; using Bedrock's Converse API title_abbrev: "Converse" @@ -301,6 +417,47 @@ bedrock-runtime_Converse_Mistral: bedrock-runtime: {Converse} # Converse Stream +bedrock-runtime_ConverseStream_AmazonNovaText: + title: Invoke Amazon Nova on &BR; using Bedrock's Converse API with a response stream + title_abbrev: "ConverseStream" + synopsis: send a text message to Amazon Nova, using Bedrock's Converse API and process the response stream in real-time. + category: Amazon Nova + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova using Bedrock's Converse API and process the response stream in real-time. + snippet_tags: + - bedrock-runtime.java2.ConverseStream_AmazonNovaText + JavaScript: + versions: + - sdk_version: 3 + github: javascriptv3/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova using Bedrock's Converse API and process the response stream in real-time. + snippet_tags: + - javascript.v3.bedrock-runtime.Converse_Mistral + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/Bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova, using Bedrock's Converse API and process the response stream in real-time. + snippet_tags: + - BedrockRuntime.dotnetv3.ConverseStream_AmazonNovaText + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-runtime + excerpts: + - description: Send a text message to Amazon Nova, using Bedrock's Converse API and process the response stream in real-time. + snippet_tags: + - python.example_code.bedrock-runtime.ConverseStream_AmazonNovaText + services: + bedrock-runtime: {ConverseStream} + bedrock-runtime_ConverseStream_AmazonTitanText: title: Invoke Amazon Titan Text on &BR; using Bedrock's Converse API with a response stream title_abbrev: "ConverseStream" @@ -590,6 +747,14 @@ bedrock-runtime_InvokeModel_TitanText: - description: Use the Invoke Model API to send a text message. snippet_tags: - bedrock-runtime.java2.InvokeModel_AmazonTitanText + Kotlin: + versions: + - sdk_version: 1 + github: kotlin/services/bedrock-runtime + excerpts: + - description: Use the Invoke Model API to generate a short story. 
+ snippet_tags: + - bedrock-runtime.kotlin.InvokeModel_AmazonTitanText .NET: versions: - sdk_version: 3 @@ -918,7 +1083,7 @@ bedrock-runtime_InvokeModelWithResponseStream_AnthropicClaude: excerpts: - description: Use the Invoke Model API to send a text message and process the response stream in real-time. snippet_tags: - - gov2.bedrock-runtime.InvokeModelWrapper.struct + - gov2.bedrock-runtime.InvokeModelWithResponseStreamWrapper.struct - gov2.bedrock-runtime.InvokeModelWithResponseStream JavaScript: versions: @@ -1072,6 +1237,47 @@ bedrock-runtime_InvokeModelWithResponseStream_MistralAi: bedrock-runtime: {InvokeModelWithResponseStream} # Image Generation Models +bedrock-runtime_InvokeModel_AmazonNovaImageGeneration: + title: Invoke Amazon Nova Canvas on &BR; to generate an image + title_abbrev: "InvokeModel" + synopsis: invoke Amazon Nova Canvas on &BR; to generate an image. + category: Amazon Nova Canvas + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/bedrock-runtime + excerpts: + - description: Create an image with Amazon Nova Canvas. + snippet_tags: + - bedrock-runtime.java2.InvokeModel_AmazonNovaImageGeneration + JavaScript: + versions: + - sdk_version: 3 + github: javascriptv3/example_code/bedrock-runtime + excerpts: + - description: Create an image with Amazon Nova Canvas. + snippet_tags: + - javascript.v3.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/Bedrock-runtime + excerpts: + - description: Create an image with Amazon Nova Canvas. + snippet_tags: + - BedrockRuntime.dotnetv3.InvokeModel_AmazonNovaImageGeneration + Python: + versions: + - sdk_version: 3 + github: python/example_code/bedrock-runtime + excerpts: + - description: Create an image with the Amazon Nova Canvas. + snippet_tags: + - python.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration + services: + bedrock-runtime: {InvokeModel} + bedrock-runtime_InvokeModel_TitanImageGenerator: title: Invoke Amazon Titan Image on &BR; to generate an image title_abbrev: "InvokeModel" @@ -1191,6 +1397,32 @@ bedrock-runtime_InvokeModelWithResponseStream_TitanTextEmbeddings: bedrock-runtime: {InvokeModel} # Tool use scenarios +bedrock-runtime_Scenario_ToolUseDemo_AmazonNova: + title: "A tool use demo illustrating how to connect AI models on &BR; with a custom tool or API" + title_abbrev: "Scenario: Tool use with the Converse API" + synopsis: "build a typical interaction between an application, a generative AI model, and connected tools or APIs to mediate interactions between the AI and the outside world. It uses the example of connecting an external weather API to the AI model so it can provide real-time weather information based on user input." + category: Amazon Nova + languages: + .NET: + versions: + - sdk_version: 3 + github: dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario + excerpts: + - description: "The primary execution of the scenario flow. This scenario orchestrates the conversation between the user, the &BR; Converse API, and a weather tool." + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.Scenario + - description: "The weather tool used by the demo. This file defines the tool specification and implements the logic to retrieve weather data using from the Open-Meteo API." + genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.WeatherTool + - description: "The Converse API action with a tool configuration." 
+ genai: some + snippet_tags: + - Bedrock.ConverseTool.dotnetv3.SendConverseRequest + services: + bedrock-runtime: {Converse} + bedrock-runtime_Scenario_ToolUseDemo_AnthropicClaude: title: "A tool use demo illustrating how to connect AI models on &BR; with a custom tool or API" title_abbrev: "Scenario: Tool use with the Converse API" @@ -1225,7 +1457,6 @@ bedrock-runtime_Scenario_ToolUseDemo_AnthropicClaude: - description: "Use statements, Error utility, and constants." snippet_tags: - rust.bedrock-runtime.Converse_AnthropicClaude.tool-use.supporting - services: bedrock-runtime: {Converse} diff --git a/.doc_gen/metadata/cross_metadata.yaml b/.doc_gen/metadata/cross_metadata.yaml index db05286b430..01931cd3ea2 100644 --- a/.doc_gen/metadata/cross_metadata.yaml +++ b/.doc_gen/metadata/cross_metadata.yaml @@ -16,7 +16,8 @@ cross_MessageProcessingFrameworkTutorial: cross_FSA: title: Create an application that analyzes customer feedback and synthesizes audio title_abbrev: Create an application to analyze customer feedback - synopsis: create an application that analyzes customer comment cards, translates them from their original language, determines + synopsis: + create an application that analyzes customer comment cards, translates them from their original language, determines their sentiment, and generates an audio file from the translated text. category: Scenarios languages: @@ -128,7 +129,8 @@ cross_SQSMessageApp: cross_RDSDataTracker: title: Create an &AUR; Serverless work item tracker title_abbrev: Create an &AUR; Serverless work item tracker - synopsis: create a web application that tracks work items in an &AURlong; Serverless database and uses &SESlong; (&SES;) + synopsis: + create a web application that tracks work items in an &AURlong; Serverless database and uses &SESlong; (&SES;) to send reports. category: Scenarios languages: @@ -242,7 +244,8 @@ cross_DynamoDBDataTracker: cross_ApiGatewayDataTracker: title: Create an &ABP; REST API to track COVID-19 data title_abbrev: Create a REST API to track COVID-19 data - synopsis: create a REST API that simulates a system to track daily cases of COVID-19 in the United States, using fictional + synopsis: + create a REST API that simulates a system to track daily cases of COVID-19 in the United States, using fictional data. category: Scenarios languages: @@ -276,7 +279,8 @@ cross_ApiGatewayWebsocketChat: cross_AuroraRestLendingLibrary: title: Create a lending library REST API title_abbrev: Create a lending library REST API - synopsis: create a lending library where patrons can borrow and return books by using a REST API backed by an &AURlong; + synopsis: + create a lending library where patrons can borrow and return books by using a REST API backed by an &AURlong; database. 
category: Scenarios languages: @@ -318,8 +322,6 @@ cross_TextractExplorer: versions: - sdk_version: 3 block_content: cross_TextractExplorer_JavaScript_block.xml - add_services: - cognito-identity: Python: versions: - sdk_version: 3 @@ -327,6 +329,7 @@ cross_TextractExplorer: block_content: cross_TextractExplorer_Python_block.xml service_main: textract services: + cognito-identity: s3: sns: sqs: @@ -379,16 +382,10 @@ cross_LambdaAPIGateway: versions: - sdk_version: 2 block_content: cross_LambdaAPIGateway_Java_block.xml - add_services: - dynamodb: - sns: JavaScript: versions: - sdk_version: 3 block_content: cross_LambdaAPIGateway_JavaScript_block.xml - add_services: - dynamodb: - sns: Python: versions: - sdk_version: 3 @@ -397,7 +394,9 @@ cross_LambdaAPIGateway: service_main: lambda services: api-gateway: + dynamodb: lambda: + sns: cross_LambdaScheduledEvents: title: Use scheduled events to invoke a &LAM; function title_abbrev: Use scheduled events to invoke a &LAM; function @@ -408,27 +407,22 @@ cross_LambdaScheduledEvents: versions: - sdk_version: 2 block_content: cross_LambdaScheduledEvents_Java_block.xml - add_services: - dynamodb: - sns: JavaScript: versions: - sdk_version: 3 block_content: cross_LambdaScheduledEvents_JavaScript_block.xml - add_services: - dynamodb: - sns: Python: versions: - sdk_version: 3 github: python/example_code/lambda block_content: cross_LambdaScheduledEvents_Python_block.xml - add_services: - cloudwatch-logs: service_main: lambda services: + cloudwatch-logs: + dynamodb: eventbridge: lambda: + sns: cross_ServerlessWorkflows: title: Use &SFN; to invoke &LAM; functions title_abbrev: Use &SFN; to invoke &LAM; functions @@ -520,20 +514,18 @@ cross_RekognitionVideoDetection: versions: - sdk_version: 2 block_content: cross_RekognitionVideoAnalyzer_Java_block.xml - add_services: - s3: - ses: Python: versions: - sdk_version: 3 github: python/example_code/rekognition block_content: cross_RekognitionVideoDetection_Python_block.xml - add_services: - sns: - sqs: service_main: rekognition services: rekognition: + s3: + ses: + sns: + sqs: cross_DetectFaces: title: Detect faces in an image using an &AWS; SDK title_abbrev: Detect faces in an image @@ -608,7 +600,8 @@ cross_LambdaForBrowser: cross_ResilientService: title: Build and manage a resilient service using an &AWS; SDK title_abbrev: Build and manage a resilient service - synopsis: create a load-balanced web service that returns book, movie, and song recommendations. The example shows how the + synopsis: + create a load-balanced web service that returns book, movie, and song recommendations. The example shows how the service responds to failures, and how to restructure the service for more resilience when failures occur. 
synopsis_list: - Use an &ASlong; group to create &EC2long; (&EC2;) instances based on a launch template and to keep the number of instances @@ -699,12 +692,38 @@ cross_ResilientService: snippet_files: - javascriptv3/example_code/cross-services/wkflw-resilient-service/steps-destroy.js services: - auto-scaling: {CreateAutoScalingGroup, DescribeAutoScalingGroups, TerminateInstanceInAutoScalingGroup, AttachLoadBalancerTargetGroups, - DeleteAutoScalingGroup, UpdateAutoScalingGroup} - ec2: {DescribeIamInstanceProfileAssociations, ReplaceIamInstanceProfileAssociation, RebootInstances, CreateLaunchTemplate, - DeleteLaunchTemplate, DescribeAvailabilityZones, DescribeInstances, DescribeVpcs, DescribeSubnets} - elastic-load-balancing-v2: {DescribeLoadBalancers, CreateTargetGroup, DescribeTargetGroups, DeleteTargetGroup, CreateLoadBalancer, - CreateListener, DeleteLoadBalancer, DescribeTargetHealth} + auto-scaling: + { + CreateAutoScalingGroup, + DescribeAutoScalingGroups, + TerminateInstanceInAutoScalingGroup, + AttachLoadBalancerTargetGroups, + DeleteAutoScalingGroup, + UpdateAutoScalingGroup, + } + ec2: + { + DescribeIamInstanceProfileAssociations, + ReplaceIamInstanceProfileAssociation, + RebootInstances, + CreateLaunchTemplate, + DeleteLaunchTemplate, + DescribeAvailabilityZones, + DescribeInstances, + DescribeVpcs, + DescribeSubnets, + } + elastic-load-balancing-v2: + { + DescribeLoadBalancers, + CreateTargetGroup, + DescribeTargetGroups, + DeleteTargetGroup, + CreateLoadBalancer, + CreateListener, + DeleteLoadBalancer, + DescribeTargetHealth, + } iam: {CreateInstanceProfile, DeleteInstanceProfile} cross_FMPlayground: title: Create a sample application that offers playgrounds to interact with &BR; foundation models using an &AWS; SDK @@ -854,7 +873,8 @@ cross_CognitoAutoConfirmUser: snippet_files: - javascriptv3/example_code/cross-services/wkflw-pools-triggers/actions/dynamodb-actions.js services: - cognito-identity-provider: {UpdateUserPool, SignUp, InitiateAuth, DeleteUser} + cognito-identity-provider: + {UpdateUserPool, SignUp, InitiateAuth, DeleteUser} lambda: {} cross_CognitoAutoMigrateUser: title: Automatically migrate known &COG; users with a &LAM; function using an &AWS; SDK @@ -899,7 +919,15 @@ cross_CognitoAutoMigrateUser: snippet_tags: - gov2.cognito-identity-provider.Resources.complete services: - cognito-identity-provider: {UpdateUserPool, SignUp, InitiateAuth, ForgotPassword, ConfirmForgotPassword, DeleteUser} + cognito-identity-provider: + { + UpdateUserPool, + SignUp, + InitiateAuth, + ForgotPassword, + ConfirmForgotPassword, + DeleteUser, + } lambda: {} cross_CognitoCustomActivityLog: title: Write custom activity data with a &LAM; function after &COG; user authentication using an &AWS; SDK @@ -944,7 +972,14 @@ cross_CognitoCustomActivityLog: snippet_tags: - gov2.cognito-identity-provider.Resources.complete services: - cognito-identity-provider: {UpdateUserPool, InitiateAuth, DeleteUser, AdminCreateUser, AdminSetUserPassword} + cognito-identity-provider: + { + UpdateUserPool, + InitiateAuth, + DeleteUser, + AdminCreateUser, + AdminSetUserPassword, + } lambda: {} cross_MonitorDynamoDB: title: Monitor performance of &DDBlong; using an &AWS; SDK diff --git a/.doc_gen/metadata/entityresolution_metadata.yaml b/.doc_gen/metadata/entityresolution_metadata.yaml new file mode 100644 index 00000000000..b318b6c2a41 --- /dev/null +++ b/.doc_gen/metadata/entityresolution_metadata.yaml @@ -0,0 +1,162 @@ +entityresolution_Hello: + title: Hello &ERlong; + title_abbrev: Hello &ER; + synopsis: get 
started using &ER;. + category: Hello + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_hello.main + services: + entityresolution: {listMatchingWorkflows} +entityresolution_DeleteSchemaMapping: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_delete_mappings.main + services: + entityresolution: {DeleteSchemaMapping} +entityresolution_TagEntityResource: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_tag_resource.main + services: + entityresolution: {TagEntityResource} +entityresolution_CreateMatchingWorkflow: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_create_matching_workflow.main + services: + entityresolution: {CreateMatchingWorkflow} +entityresolution_CheckWorkflowStatus: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_check_matching_workflow.main + services: + entityresolution: {CheckWorkflowStatus} +entityresolution_StartMatchingJob: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_start_job.main + services: + entityresolution: {StartMatchingJob} +entityresolution_GetMatchingJob: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_get_job.main + services: + entityresolution: {GetMatchingJob} +entityresolution_DeleteMatchingWorkflow: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_delete_matching_workflow.main + services: + entityresolution: {DeleteMatchingWorkflow} +entityresolution_ListSchemaMappings: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_list_mappings.main + services: + entityresolution: {ListSchemaMappings} +entityresolution_GetSchemaMapping: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_get_schema_mapping.main + services: + entityresolution: {GetSchemaMapping} +entityresolution_CreateSchemaMapping: + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + excerpts: + - description: + snippet_tags: + - entityres.java2_create_schema.main + services: + entityresolution: {CreateSchemaMapping} +entityresolution_Scenario: + synopsis_list: + - Create Schema Mapping. + - Create an &ERlong; workflow. + - Start the matching job for the workflow. + - Get details for the matching job. + - Get Schema Mapping. + - List all Schema Mappings. + - Tag the Schema Mapping resource. + - Delete the &ERlong; Assets. 
+ category: Basics + languages: + Java: + versions: + - sdk_version: 2 + github: javav2/example_code/entityresolution + sdkguide: + excerpts: + - description: Run an interactive scenario demonstrating &ERlong; features. + snippet_tags: + - entityres.java2_scenario.main + - description: A wrapper class for &ERlong; SDK methods. + snippet_tags: + - entityres.java2_actions.main + services: + entityresolution: {} diff --git a/.doc_gen/metadata/iam_metadata.yaml b/.doc_gen/metadata/iam_metadata.yaml index 383f52d39ff..480b9d7ee9f 100644 --- a/.doc_gen/metadata/iam_metadata.yaml +++ b/.doc_gen/metadata/iam_metadata.yaml @@ -889,6 +889,15 @@ iam_UpdateAccessKey: - description: snippet_tags: - iam.cpp.update_access_key.code + Bash: + versions: + - sdk_version: 2 + github: aws-cli/bash-linux/iam + sdkguide: + excerpts: + - description: + snippet_tags: + - aws-cli.bash-linux.iam.UpdateAccessKey services: iam: {UpdateAccessKey} iam_Scenario_ManageAccessKeys: diff --git a/.doc_gen/metadata/iot_metadata.yaml b/.doc_gen/metadata/iot_metadata.yaml index 8c75fada4d3..6339c137a6f 100644 --- a/.doc_gen/metadata/iot_metadata.yaml +++ b/.doc_gen/metadata/iot_metadata.yaml @@ -429,7 +429,18 @@ iot_CreateThing: services: iot: {CreateThing} iot_Scenario: - synopsis: work with &IoT; device management. + synopsis_list: + - Create an &IoT; Thing. + - Generate a device certificate. + - Update an &IoT; Thing with Attributes. + - Return a unique endpoint. + - List your &IoT; certificates. + - Create an &IoT; shadow. + - Write out state information. + - Creates a rule. + - List your rules. + - Search things using the Thing name. + - Delete an &IoT; Thing. category: Basics languages: Kotlin: diff --git a/.doc_gen/metadata/iot_sitewise_metadata.yaml b/.doc_gen/metadata/iot_sitewise_metadata.yaml index 6b067573271..d6dd34950e5 100644 --- a/.doc_gen/metadata/iot_sitewise_metadata.yaml +++ b/.doc_gen/metadata/iot_sitewise_metadata.yaml @@ -444,7 +444,16 @@ iotsitewise_CreateAssetModel: services: iotsitewise: {CreateAssetModel} iotsitewise_Scenario: - synopsis: learn core operations for &ITSWlong; using an &AWS; SDK. + synopsis_list: + - Create an &ITSWlong; Asset Model. + - Create an &ITSWlong; Asset. + - Retrieve the property ID values. + - Send data to an &ITSWlong; Asset. + - Retrieve the value of the &ITSWlong; Asset property. + - Create an &ITSWlong; Portal. + - Create an &ITSWlong; Gateway. + - Describe the &ITSWlong; Gateway. + - Delete the &ITSWlong; Assets. category: Basics languages: Java: diff --git a/.doc_gen/metadata/redshift_metadata.yaml b/.doc_gen/metadata/redshift_metadata.yaml index 2f4392bf8b7..d9627603d72 100644 --- a/.doc_gen/metadata/redshift_metadata.yaml +++ b/.doc_gen/metadata/redshift_metadata.yaml @@ -346,7 +346,14 @@ redshift_ExecuteStatement: services: redshift: {ExecuteStatement} redshift_Scenario: - synopsis: learn core operations for &RS; using an &AWS; SDK. + synopsis_list: + - Create a Redshift cluster. + - List databases in the cluster. + - Create a table named Movies. + - Populate the Movies table. + - Query the Movies table by year. + - Modify the Redshift cluster. + - Delete the Amazon Redshift cluster. category: Basics languages: Go: diff --git a/.doc_gen/metadata/s3_metadata.yaml b/.doc_gen/metadata/s3_metadata.yaml index 061b269d3dd..8569428e2cd 100644 --- a/.doc_gen/metadata/s3_metadata.yaml +++ b/.doc_gen/metadata/s3_metadata.yaml @@ -293,6 +293,18 @@ s3_CopyObject: - description: Copy the object. 
              snippet_tags:
                - s3.JavaScript.buckets.copyObjectV3
+            - description: Copy the object on condition its ETag matches the one provided.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-match.js
+            - description: Copy the object on condition its ETag does not match the one provided.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-none-match.js
+            - description: Copy the object on condition it has been created or modified in a given timeframe.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-modified-since.js
+            - description: Copy the object on condition it has not been created or modified in a given timeframe.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-unmodified-since.js
    PHP:
      versions:
        - sdk_version: 3
@@ -951,6 +963,18 @@ s3_GetObject:
            - description: Download the object.
              snippet_tags:
                - s3.JavaScript.buckets.getobjectV3
+            - description: Download the object on condition its ETag matches the one provided.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/get-object-conditional-request-if-match.js
+            - description: Download the object on condition its ETag does not match the one provided.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/get-object-conditional-request-if-none-match.js
+            - description: Download the object on condition it has been created or modified in a given timeframe.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/get-object-conditional-request-if-modified-since.js
+            - description: Download the object on condition it has not been created or modified in a given timeframe.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/get-object-conditional-request-if-unmodified-since.js
    Ruby:
      versions:
        - sdk_version: 3
@@ -1602,6 +1626,9 @@ s3_PutObject:
            - description: Upload the object.
              snippet_tags:
                - s3.JavaScript.buckets.uploadV3
+            - description: Upload the object on condition its ETag matches the one provided.
+              snippet_files:
+                - javascriptv3/example_code/s3/actions/get-object-conditional-request-if-match.js
    Ruby:
      versions:
        - sdk_version: 3
@@ -3617,6 +3644,29 @@ s3_Scenario_ConditionalRequests:
            - description: A wrapper class for S3 functions.
              snippet_tags:
                - S3ConditionalRequests.dotnetv3.S3ActionsWrapper
+    JavaScript:
+      versions:
+        - sdk_version: 3
+          github: javascriptv3/example_code/s3/scenarios/conditional-requests
+          sdkguide:
+          excerpts:
+            - description: |
+                Entrypoint for the workflow (index.js). This orchestrates all of the steps.
+                Visit GitHub to see the implementation details for Scenario, ScenarioInput, ScenarioOutput, and ScenarioAction.
+              snippet_files:
+                - javascriptv3/example_code/s3/scenarios/conditional-requests/index.js
+            - description: Output welcome messages to the console (welcome.steps.js).
+              snippet_files:
+                - javascriptv3/example_code/s3/scenarios/conditional-requests/welcome.steps.js
+            - description: Deploy buckets and objects (setup.steps.js).
+              snippet_files:
+                - javascriptv3/example_code/s3/scenarios/conditional-requests/setup.steps.js
+            - description: Get, copy, and put objects using S3 conditional requests (repl.steps.js).
+              snippet_files:
+                - javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.js
+            - description: Destroy all created resources (clean.steps.js).
+ snippet_files: + - javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.js services: s3: {GetObject, PutObject, CopyObject} s3_Scenario_DownloadS3Directory: diff --git a/.doc_gen/metadata/sns_metadata.yaml b/.doc_gen/metadata/sns_metadata.yaml index 3b2cb00107d..82a11953148 100644 --- a/.doc_gen/metadata/sns_metadata.yaml +++ b/.doc_gen/metadata/sns_metadata.yaml @@ -56,6 +56,17 @@ sns_Hello: - description: Initialize an SNS client and and list topics in your account. snippet_tags: - javascript.v3.sns.hello + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns/basics + excerpts: + - description: The Package.swift file. + snippet_tags: + - swift.sns.basics.package + - description: The main Swift program. + snippet_tags: + - swift.sns.basics.hello services: sns: {ListTopics} sns_GetTopicAttributes: @@ -294,6 +305,13 @@ sns_ListTopics: excerpts: - snippet_tags: - sns.rust.list-topics + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns/basics + excerpts: + - snippet_tags: + - swift.sns.ListTopics SAP ABAP: versions: - sdk_version: 1 @@ -525,6 +543,13 @@ sns_CreateTopic: excerpts: - snippet_tags: - sns.rust.create-topic + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns + excerpts: + - snippet_tags: + - swift.sns.CreateTopic SAP ABAP: versions: - sdk_version: 1 @@ -607,6 +632,13 @@ sns_DeleteTopic: - snippet_tags: - python.example_code.sns.SnsWrapper - python.example_code.sns.DeleteTopic + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns + excerpts: + - snippet_tags: + - swift.sns.DeleteTopic SAP ABAP: versions: - sdk_version: 1 @@ -745,6 +777,13 @@ sns_Publish: excerpts: - snippet_tags: - sns.rust.sns-hello-world + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns + excerpts: + - snippet_tags: + - swift.sns.Publish SAP ABAP: versions: - sdk_version: 1 @@ -1067,6 +1106,17 @@ sns_Subscribe: - description: Subscribe an email address to a topic. snippet_tags: - sns.rust.sns-hello-world + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns + excerpts: + - description: Subscribe an email address to a topic. + snippet_tags: + - swift.sns.SubscribeEmail + - description: Subscribe a phone number to a topic to receive notifications by SMS. + snippet_tags: + - swift.sns.SubscribeSMS SAP ABAP: versions: - sdk_version: 1 @@ -1140,6 +1190,13 @@ sns_Unsubscribe: - snippet_tags: - python.example_code.sns.SnsWrapper - python.example_code.sns.Unsubscribe + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sns + excerpts: + - snippet_tags: + - swift.sns.Unsubscribe SAP ABAP: versions: - sdk_version: 1 diff --git a/.doc_gen/metadata/sqs_metadata.yaml b/.doc_gen/metadata/sqs_metadata.yaml index 166119d1930..f3553187992 100644 --- a/.doc_gen/metadata/sqs_metadata.yaml +++ b/.doc_gen/metadata/sqs_metadata.yaml @@ -58,6 +58,18 @@ sqs_Hello: - description: Initialize an &SQS; client and list queues. snippet_tags: - javascript.v3.sqs.hello + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: The Package.swift file. + snippet_tags: + - swift.sqs.basics.package + - description: The Swift source code, entry.swift. 
+ snippet_tags: + - swift.sqs.basics services: sqs: {ListQueues} sqs_CreateQueue: @@ -163,6 +175,15 @@ sqs_CreateQueue: snippet_tags: - cpp.example_code.sqs.CreateQueue.config - cpp.example_code.sqs.CreateQueue + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.CreateQueue services: sqs: {CreateQueue} sqs_GetQueueUrl: @@ -320,6 +341,15 @@ sqs_ListQueues: snippet_tags: - cpp.example_code.sqs.ListQueues.config - cpp.example_code.sqs.ListQueues + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.ListQueues services: sqs: {ListQueues} sqs_DeleteQueue: @@ -414,6 +444,15 @@ sqs_DeleteQueue: snippet_tags: - cpp.example_code.sqs.DeleteQueue.config - cpp.example_code.sqs.DeleteQueue + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.DeleteQueue services: sqs: {DeleteQueue} sqs_SendMessage: @@ -649,6 +688,15 @@ sqs_ReceiveMessage: snippet_tags: - cpp.example_code.sqs.ReceiveMessage.config - cpp.example_code.sqs.ReceiveMessage + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.ReceiveMessage services: sqs: {ReceiveMessage} sqs_DeleteMessage: @@ -765,6 +813,15 @@ sqs_DeleteMessageBatch: - description: snippet_tags: - sqs.JavaScript.messages.receiveMessageV3 + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.DeleteMessageBatch services: sqs: {DeleteMessageBatch} sqs_Scenario_SendReceiveBatch: @@ -836,6 +893,15 @@ sqs_GetQueueAttributes: - description: snippet_tags: - javascript.v3.sqs.actions.GetQueueAttributes + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.GetQueueAttributes services: sqs: {GetQueueAttributes} sqs_ChangeMessageVisibility: @@ -931,6 +997,15 @@ sqs_SetQueueAttributes: - description: Configure a dead-letter queue. snippet_tags: - sqs.JavaScript.deadLetter.setQueueAttributesV3 + Swift: + versions: + - sdk_version: 1 + github: swift/example_code/sqs + sdkguide: + excerpts: + - description: + snippet_tags: + - swift.sqs.SetQueueAttributes services: sqs: {SetQueueAttributes} sqs_Scenario_TopicsAndQueues: diff --git a/.doc_gen/metadata/ssm_metadata.yaml b/.doc_gen/metadata/ssm_metadata.yaml index b3cc04d35b1..ed81ab102a8 100644 --- a/.doc_gen/metadata/ssm_metadata.yaml +++ b/.doc_gen/metadata/ssm_metadata.yaml @@ -412,7 +412,14 @@ ssm_UpdateOpsItem: services: ssm: {UpdateOpsItem} ssm_Scenario: - synopsis: work with &SYS; maintenance windows, documents, and OpsItems. + synopsis_list: + - Create a maintenance window. + - Modify the maintenance window schedule. + - Create a document. + - Send a command to a specified EC2 instance. + - Create an OpsItem. + - Update and resolve the OpsItem. + - Delete the maintenance window, OpsItem, and document. 
category: Basics languages: Java: diff --git a/.doc_gen/validation.yaml b/.doc_gen/validation.yaml index 3aadb80233f..fe87874aa32 100644 --- a/.doc_gen/validation.yaml +++ b/.doc_gen/validation.yaml @@ -1,6 +1,7 @@ allow_list: # Git commits - "cd5e746ec203c8c3c61647e0886a8df8c1e78e41" + - "erbucketf684533d2680435fa99d24b1bdaf5179" - "725feb26d6f73bc1d83dbbe075ae8ea991efb245" - "e9772d140489982e0e3704fea5ee93d536f1e275" # Safe look-alikes, mostly tokens and paths that happen to be 40 characters. @@ -211,6 +212,7 @@ allow_list: - "src/main/java/com/example/acm/DeleteCert" - "src/main/java/com/example/acm/ImportCert" - "EnablePropagateAdditionalUserContextData" + - "StopQueryWorkloadInsightsTopContributors" sample_files: - "README.md" - "chat_sfn_state_machine.json" diff --git a/.github/allowed-labels.yml b/.github/allowed-labels.yml index 2dd39429d1c..f6d223588a0 100644 --- a/.github/allowed-labels.yml +++ b/.github/allowed-labels.yml @@ -12,7 +12,7 @@ - name: MVP color: f5f7f9 description: "A Minimum Viable Product example to show the bare bones of how to use a service via an SDK." -- name: Workflow +- name: Feature Scenario color: f5f7f9 description: "A simple code example to show how certain tasks can be accomplished using several services and SDKs." - name: Basics diff --git a/.github/workflows/automerge-approved-prs.yml b/.github/workflows/automerge-approved-prs.yml new file mode 100644 index 00000000000..e7ea47f3775 --- /dev/null +++ b/.github/workflows/automerge-approved-prs.yml @@ -0,0 +1,32 @@ +on: # yamllint disable-line rule:truthy + pull_request_review: + types: submitted + +jobs: + approved_pr: + name: Automerge approved PRs + permissions: + contents: write + pull-requests: write + id-token: write + if: ${{ github.event.review.state == 'approved' && github.repository == 'awsdocs/aws-doc-sdk-examples' && (github.event.review.author_association == 'OWNER' || github.event.review.author_association == 'MEMBER' || github.event.review.user.login == 'aws-sdk-osds') }} + runs-on: ubuntu-latest + steps: + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: arn:aws:iam::206735643321:role/ConfigureAwsCredentialsPackageRole + role-duration-seconds: 900 + role-session-name: SecretsManagerFetch + - name: Get bot user token + uses: aws-actions/aws-secretsmanager-get-secrets@v2 + with: + parse-json-secrets: true + secret-ids: | + OSDS,arn:aws:secretsmanager:us-west-2:206735643321:secret:github-aws-sdk-osds-automation-gebs9n + - name: Enable PR automerge + run: gh pr merge --auto --squash "$PR_URL" + env: + PR_URL: ${{ github.event.pull_request.html_url }} + GITHUB_TOKEN: ${{ env.OSDS_ACCESS_TOKEN }} diff --git a/.github/workflows/dependabot-autoapprove.yml b/.github/workflows/dependabot-autoapprove.yml new file mode 100644 index 00000000000..a4228da0627 --- /dev/null +++ b/.github/workflows/dependabot-autoapprove.yml @@ -0,0 +1,37 @@ +name: Dependabot auto-approve +on: pull_request # yamllint disable-line rule:truthy +permissions: + pull-requests: write + id-token: write +jobs: + dependabot: + runs-on: ubuntu-latest + if: ${{ github.event.pull_request.user.login == 'dependabot[bot]' && github.repository == 'awsdocs/aws-doc-sdk-examples' }} + steps: + - name: Get Metadata + id: dependabot-metadata + uses: dependabot/fetch-metadata@v2 + - uses: actions/checkout@v4 + name: Clone repo + - name: Configure AWS credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-region: us-west-2 + role-to-assume: 
arn:aws:iam::206735643321:role/ConfigureAwsCredentialsPackageRole
+          role-duration-seconds: 900
+      - name: Get bot user token
+        uses: aws-actions/aws-secretsmanager-get-secrets@v2
+        with:
+          parse-json-secrets: true
+          secret-ids: |
+            OSDS,arn:aws:secretsmanager:us-west-2:206735643321:secret:github-aws-sdk-osds-automation-gebs9n
+      - name: Approve PR if not already approved
+        run: |
+          gh pr checkout "$PR_URL"
+          if [ "$(gh pr status --json reviewDecision -q .currentBranch.reviewDecision)" != "APPROVED" ]; then
+            gh pr review "$PR_URL" --approve
+          else echo "PR already approved"
+          fi
+        env:
+          PR_URL: ${{ github.event.pull_request.html_url }}
+          GITHUB_TOKEN: ${{ env.OSDS_ACCESS_TOKEN }}
diff --git a/.github/workflows/label-checker.yml b/.github/workflows/label-checker.yml
index 6d26deb93d8..3c3893ecef9 100644
--- a/.github/workflows/label-checker.yml
+++ b/.github/workflows/label-checker.yml
@@ -26,5 +26,5 @@ jobs:
    steps:
      - uses: docker://agilepathway/pull-request-label-checker:latest
        with:
-          one_of: Application,MVP,Workflow,Task,Bug,Basics
+          one_of: Application,MVP,Feature Scenario,Task,Bug,Basics
          repo_token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/validate-doc-metadata.yml b/.github/workflows/validate-doc-metadata.yml
index 1948f05ccce..c0396eb9c01 100644
--- a/.github/workflows/validate-doc-metadata.yml
+++ b/.github/workflows/validate-doc-metadata.yml
@@ -16,7 +16,7 @@ jobs:
      - name: checkout repo content
        uses: actions/checkout@v4
      - name: validate metadata
-        uses: awsdocs/aws-doc-sdk-examples-tools@2025.02.0
+        uses: awsdocs/aws-doc-sdk-examples-tools@2025.08.0
        with:
          doc_gen_only: "False"
          strict_titles: "True"
diff --git a/.tools/test/stacks/config/targets.yaml b/.tools/test/stacks/config/targets.yaml
index 7686219e298..974a05fa81f 100644
--- a/.tools/test/stacks/config/targets.yaml
+++ b/.tools/test/stacks/config/targets.yaml
@@ -13,10 +13,10 @@ javascriptv3:
  account_id: "875008041426"
  status: "enabled"
javav2:
-  account_id: "667348412466" # back-up "814548047983"
+  account_id: "814548047983" # back-up "667348412466"
  status: "enabled"
kotlin:
-  account_id: "471951630130" # back-up "814548047983"
+  account_id: "814548047983" # back-up "471951630130"
  status: "enabled"
php:
  account_id: "733931915187"
diff --git a/.tools/test/stacks/nuke/typescript/.prettierignore b/.tools/test/stacks/nuke/typescript/.prettierignore
new file mode 100644
index 00000000000..41857269f92
--- /dev/null
+++ b/.tools/test/stacks/nuke/typescript/.prettierignore
@@ -0,0 +1 @@
+cdk.out/
diff --git a/.tools/test/stacks/nuke/typescript/Dockerfile b/.tools/test/stacks/nuke/typescript/Dockerfile
new file mode 100644
index 00000000000..d451651bf7c
--- /dev/null
+++ b/.tools/test/stacks/nuke/typescript/Dockerfile
@@ -0,0 +1,12 @@
+FROM ghcr.io/ekristen/aws-nuke:v3.42.0
+ENV AWS_SDK_LOAD_CONFIG=1 \
+    AWS_DEBUG=true
+USER root
+RUN apk add --no-cache \
+    python3 \
+    py3-pip \
+    aws-cli
+COPY nuke_generic_config.yaml /nuke_generic_config.yaml
+COPY --chmod=755 run.sh /run.sh
+USER aws-nuke
+ENTRYPOINT ["/run.sh"]
diff --git a/.tools/test/stacks/nuke/typescript/README.md b/.tools/test/stacks/nuke/typescript/README.md
new file mode 100644
index 00000000000..2ebe1c3fc0e
--- /dev/null
+++ b/.tools/test/stacks/nuke/typescript/README.md
@@ -0,0 +1,52 @@
+# aws-nuke for Weathertop
+
+[aws-nuke](https://github.com/ekristen/aws-nuke) is an open-source tool that deletes non-default resources in a provided AWS account.
It's implemented here in this directory using Cloud Development Kit (CDK) code that deploys the [official aws-nuke image](https://github.com/ekristen/aws-nuke/pkgs/container/aws-nuke) to an AWS Lambda function. + +## ⚠ Important + +This is a very destructive tool! It should not be deployed without fully understanding the impact it will have on your AWS accounts. +Please use caution and configure this tool to delete unused resources only in your lower test/sandbox environment accounts. + +## Overview + +This CDK stack is defined in [account_nuker.ts](account_nuker.ts). It includes: + +- A Docker-based Lambda function with ARM64 architecture and 1GB memory +- An IAM role with administrative permissions for the Lambda's nuking function +- An EventBridge rule that triggers the function every Sunday at midnight + +More specifically, this Lambda function is built from a [Dockerfile](Dockerfile) and runs with a 15-minute timeout. It contains a [nuke_generic_config.yml](nuke_generic_config.yaml) config and executes a [run.sh](run.sh) when invoked every Sunday at midnight UTC. + +![infrastructure-overview](nuke-overview.png) + +## Prerequisites + +1. **Non-Prod AWS Account Alias**: A non-prod account alias must exist in target account. Set the alias by running `python create_account_alias.py weathertop-test` or following [these instructions](https://docs.aws.amazon.com/IAM/latest/UserGuide/account-alias-create.html). + +## Setup and Installation + +For multi-account deployments, please use the [deploy.py](../../../DEPLOYMENT.md#option-1-using-deploypy) script. + +For single-account deployment, you can just run: + +```sh +cdk bootstrap && cdk deploy +``` + +Note a successful stack creation, e.g.: + +```bash +NukeStack: success: Published 956fbd116734e79edb987e767fe7f45d0b97e2123456789109103f80ba4c1:123456789101-us-east-1 +Stack undefined +NukeStack: deploying... [1/1] +NukeStack: creating CloudFormation changeset... + + ✅ NukeStack + +✨ Deployment time: 27.93s + +Stack ARN: +arn:aws:cloudformation:us-east-1:123456789101:stack/NukeStack/9835cc20-d358-11ef-bccf-123407dc82dd + +✨ Total time: 33.24s +``` diff --git a/.tools/test/stacks/nuke/typescript/account_nuker.ts b/.tools/test/stacks/nuke/typescript/account_nuker.ts new file mode 100644 index 00000000000..2698d657ad9 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/account_nuker.ts @@ -0,0 +1,65 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import * as cdk from "aws-cdk-lib"; +import * as events from "aws-cdk-lib/aws-events"; +import * as targets from "aws-cdk-lib/aws-events-targets"; +import * as iam from "aws-cdk-lib/aws-iam"; +import * as path from "path"; +import * as lambda from "aws-cdk-lib/aws-lambda"; +import { Duration, Stack, StackProps } from "aws-cdk-lib"; +import { Construct } from "constructs"; +import { DockerImageCode, DockerImageFunction } from "aws-cdk-lib/aws-lambda"; + +export interface NukeStackProps extends cdk.StackProps { + awsNukeDryRunFlag?: string; + awsNukeVersion?: string; + owner?: string; +} + +class NukeStack extends cdk.Stack { + private readonly nukeLambdaRole: iam.Role; + + constructor(scope: Construct, id: string, props?: StackProps) { + super(scope, id, props); + + // Lambda Function role + this.nukeLambdaRole = new iam.Role(this, "NukeLambdaRole", { + assumedBy: new iam.ServicePrincipal("lambda.amazonaws.com"), + managedPolicies: [ + iam.ManagedPolicy.fromAwsManagedPolicyName("AdministratorAccess"), + ], + }); + + // Create the Lambda function + const lambdaFunction = new DockerImageFunction( + this, + "docker-lambda-function", + { + functionName: "docker-lambda-fn", + code: DockerImageCode.fromImageAsset(path.join(__dirname)), + memorySize: 1024, + timeout: Duration.minutes(15), + architecture: lambda.Architecture.ARM_64, + description: "This is dockerized AWS Lambda function", + role: this.nukeLambdaRole, + }, + ); + + // Create EventBridge rule to trigger the Lambda function weekly + const rule = new events.Rule(this, "WeeklyTriggerRule", { + schedule: events.Schedule.expression("cron(0 0 ? * SUN *)"), // Runs at 00:00 every Sunday + }); + + // Add the Lambda function as a target for the EventBridge rule + rule.addTarget(new targets.LambdaFunction(lambdaFunction)); + } +} + +const app = new cdk.App(); +new NukeStack(app, "NukeStack", { + env: { + account: process.env.CDK_DEFAULT_ACCOUNT, + region: process.env.CDK_DEFAULT_REGION, + }, + terminationProtection: true, +}); diff --git a/.tools/test/stacks/nuke/typescript/cdk.json b/.tools/test/stacks/nuke/typescript/cdk.json new file mode 100644 index 00000000000..b75b3c38598 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/cdk.json @@ -0,0 +1,33 @@ +{ + "app": "npx ts-node --prefer-ts-exts account_nuker.ts", + "watch": { + "include": ["**"], + "exclude": [ + "README.md", + "cdk*.json", + "**/*.d.ts", + "**/*.js", + "tsconfig.json", + "package*.json", + "yarn.lock", + "node_modules", + "test" + ] + }, + "context": { + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/core:target-partitions": ["aws", "aws-cn"], + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, + "@aws-cdk/core:enablePartitionLiterals": true, + "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, + "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, + "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, + "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, + "@aws-cdk/core:cfnIncludeRejectComplexResourceUpdateCreatePolicyIntrinsics": true, + "@aws-cdk/aws-lambda-nodejs:sdkV3ExcludeSmithyPackages": true, + "cdk-migrate": true + } +} diff --git a/.tools/test/stacks/nuke/typescript/create_account_alias.py b/.tools/test/stacks/nuke/typescript/create_account_alias.py new file mode 100644 index 
00000000000..c2e4601a843 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/create_account_alias.py @@ -0,0 +1,118 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +""" +This module is used to create an AWS account alias, which is required by the deploy.py script. + +It provides a function to create an account alias using the AWS CLI, as this specific +operation is not supported by the AWS CDK. +""" + +import logging +import re +import subprocess + +logger = logging.getLogger(__name__) + + +def _is_valid_alias(alias_name: str) -> bool: + """ + Check if the provided alias name is valid according to AWS rules. + + AWS account alias must be unique and must be between 3 and 63 characters long. + Valid characters are a-z, 0-9 and '-'. + + Args: + alias_name (str): The alias name to validate. + + Returns: + bool: True if the alias is valid, False otherwise. + """ + pattern = r"^[a-z0-9](([a-z0-9]|-){0,61}[a-z0-9])?$" + return bool(re.match(pattern, alias_name)) and 3 <= len(alias_name) <= 63 + + +def _log_aws_cli_version() -> None: + """ + Log the version of the AWS CLI installed on the system. + """ + try: + result = subprocess.run(["aws", "--version"], capture_output=True, text=True) + logger.info(f"AWS CLI version: {result.stderr.strip()}") + except Exception as e: + logger.warning(f"Unable to determine AWS CLI version: {str(e)}") + + +def create_account_alias(alias_name: str) -> None: + """ + Create a new account alias with the given name. + + This function exists because the CDK does not support the specific + CreateAccountAliases API call. It attempts to create an account alias + using the AWS CLI and logs the result. + + If the account alias is created successfully, it logs a success message. + If the account alias already exists, it logs a message indicating that. + If there is any other error, it logs the error message. + + Args: + alias_name (str): The desired name for the account alias. + """ + # Log AWS CLI version when the function is called + _log_aws_cli_version() + + if not _is_valid_alias(alias_name): + logger.error( + f"Invalid alias name '{alias_name}'. It must be between 3 and 63 characters long and contain only lowercase letters, numbers, and hyphens." + ) + return + + command = ["aws", "iam", "create-account-alias", "--account-alias", alias_name] + + try: + subprocess.run( + command, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + check=True, + ) + logger.info(f"Account alias '{alias_name}' created successfully.") + except subprocess.CalledProcessError as e: + if "EntityAlreadyExists" in e.stderr: + logger.info(f"Account alias '{alias_name}' already exists.") + elif "AccessDenied" in e.stderr: + logger.error( + f"Access denied when creating account alias '{alias_name}'. Check your AWS credentials and permissions." + ) + elif "ValidationError" in e.stderr: + logger.error( + f"Validation error when creating account alias '{alias_name}'. The alias might not meet AWS requirements." 
+ ) + else: + logger.error(f"Error creating account alias '{alias_name}': {e.stderr}") + except Exception as e: + logger.error( + f"Unexpected error occurred while creating account alias '{alias_name}': {str(e)}" + ) + + +def main(): + import argparse + + # Set up logging + logging.basicConfig( + level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s" + ) + + # Create argument parser + parser = argparse.ArgumentParser(description="Create an AWS account alias") + parser.add_argument("alias", help="The alias name for the AWS account") + + # Parse arguments + args = parser.parse_args() + + # Call the function with the provided alias + create_account_alias(args.alias) + +if __name__ == "__main__": + main() diff --git a/.tools/test/stacks/nuke/typescript/nuke-architecture.jpg b/.tools/test/stacks/nuke/typescript/nuke-architecture.jpg new file mode 100644 index 00000000000..c5a69c71509 Binary files /dev/null and b/.tools/test/stacks/nuke/typescript/nuke-architecture.jpg differ diff --git a/.tools/test/stacks/nuke/typescript/nuke_generic_config.yaml b/.tools/test/stacks/nuke/typescript/nuke_generic_config.yaml new file mode 100644 index 00000000000..261b2c35950 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/nuke_generic_config.yaml @@ -0,0 +1,157 @@ +regions: + - us-east-1 + +blocklist: + # Must have 1+ blocklist entry (see https://aws-nuke.ekristen.dev/warning/) + - 000000000000 + +resource-types: + excludes: + - ACMCertificate + - AWSBackupPlan + - AWSBackupRecoveryPoint + - AWSBackupSelection + - AWSBackupVault + - AWSBackupVaultAccessPolicy + - CloudTrailTrail + - CloudWatchEventsTarget + - CodeCommitRepository + - CodeStarProject + - ConfigServiceConfigRule + - ECRRepository + - EC2Address + - EC2ClientVpnEndpoint + - EC2ClientVpnEndpointAttachment + - EC2CustomerGateway + - EC2DHCPOption + - EC2DefaultSecurityGroupRule + - EC2EgressOnlyInternetGateway + - EC2InternetGateway + - EC2InternetGatewayAttachment + - EC2KeyPair + - EC2NetworkACL + - EC2NetworkInterface + - EC2RouteTable + - EC2SecurityGroup + - EC2Subnet + - EC2VPC + - EC2VPCEndpoint + - IAMGroup + - IAMGroupPolicy + - IAMGroupPolicyAttachment + - IAMInstanceProfile + - IAMInstanceProfileRole + - IAMLoginProfile + - IAMOpenIDConnectProvider + - IAMPolicy + - IAMRole + - IAMRolePolicy + - IAMRolePolicyAttachment + - IAMSAMLProvider + - IAMServerCertificate + - IAMServiceSpecificCredential + - IAMSigningCertificate + - IAMUser + - IAMUserAccessKey + - IAMUserGroupAttachment + - IAMUserPolicy + - IAMUserPolicyAttachment + - IAMUserSSHPublicKey + - IAMVirtualMFADevice + - KMSAlias + - KMSKey + - Route53HostedZone + - Route53ResourceRecordSet + - S3Bucket + - S3Object + - SecretsManagerSecret + - SQSQueue + - SSMParameter + +accounts: + AWSACCOUNTID: + filters: + EC2VPC: + - property: IsDefault + value: "true" + EC2DHCPOption: + - property: DefaultVPC + value: "true" + EC2InternetGateway: + - property: DefaultVPC + value: "true" + EC2InternetGatewayAttachment: + - property: DefaultVPC + value: "true" + EC2Subnet: + - property: DefaultVPC + value: "true" + EC2RouteTable: + - property: DefaultVPC + value: "true" + EC2DefaultSecurityGroupRule: + - property: SecurityGroupId + type: glob + value: "*" + LambdaEventSourceMapping: + - property: "EventSourceArn" + type: "glob" + value: "^(PluginStack|NukeStack)*$" + - property: "FunctionArn" + type: "glob" + value: "^(PluginStack|NukeStack)*$" + LambdaPermission: + - property: "name" + type: "glob" + value: "^(PluginStack|NukeStack)*$" + GuardDutyDetector: + - property: 
DetectorID + type: glob + value: "*" + CloudWatchEventsRule: + - type: regex + value: "^Rule: (AwsSecurity.*)$" + CloudWatchEventsTarget: + - type: regex + value: "^Rule: (AwsSecurity.*)$" + CloudWatchLogsLogGroup: + - type: regex + value: "^.*$" + ConfigServiceDeliveryChannel: + - "default" + ConfigServiceConfigRule: + - type: regex + value: "^(managed-ec2-patch-compliance|ec2-managed-by-systems-manager-REMEDIATE)$" + S3Bucket: + - property: Name + type: regex + value: "^(cdktoolkit-stagingbucket-.*|aws-nuke.*)$" + S3Object: + - property: Bucket + type: regex + value: "^(cdktoolkit-stagingbucket-.*|aws-nuke.*)$" + ConfigServiceConfigurationRecorder: + - "MainRecorder" + CloudFormationStack: + - property: Name + type: regex + value: "^(CDKToolkit)$" + - property: Name + type: regex + value: "^(PluginStack|NukeStack)*$" + IAMPolicy: + - property: Name + type: regex + value: "^(ConfigAccessPolicy|ResourceConfigurationCollectorPolicy|CloudFormationRefereeService|EC2CapacityReservationService|AwsSecurit.*AuditPolicy)$" + IAMRole: + - property: Name + type: regex + value: "^(AWSServiceRoleFor.*|Admin|ReadOnly|InternalAuditInternal|EC2CapacityReservationService|AccessAnalyzerTrustedService|AwsSecurit.*Audit|AWS.*Audit)$" + IAMRolePolicy: + - property: role:RoleName + type: regex + value: "^(AccessAnalyzerTrustedService|AwsSecurit.*Audit)$" + IAMRolePolicyAttachment: + - property: RoleName + type: regex + value: "^(Admin|ReadOnly|AWSServiceRoleFor.*|InternalAuditInternal|EC2CapacityReservationService|AWSVAPTAudit|AwsSecurit.*Audit)$" diff --git a/.tools/test/stacks/nuke/typescript/package.json b/.tools/test/stacks/nuke/typescript/package.json new file mode 100644 index 00000000000..8353504f81d --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/package.json @@ -0,0 +1,27 @@ +{ + "name": "account_nuker", + "version": "0.1.0", + "bin": { + "nuke_cleanser": "account_nuker.ts" + }, + "scripts": { + "build": "tsc", + "watch": "tsc -w", + "test": "jest", + "cdk": "cdk" + }, + "devDependencies": { + "@types/jest": "^29.5.12", + "@types/node": "22.5.4", + "aws-cdk": "2.164.1", + "jest": "^29.7.0", + "ts-jest": "^29.2.5", + "ts-node": "^10.9.2", + "typescript": "~5.6.2" + }, + "dependencies": { + "aws-cdk-lib": "^2.164.1", + "constructs": "^10.4.2", + "source-map-support": "^0.5.21" + } +} diff --git a/.tools/test/stacks/nuke/typescript/run.sh b/.tools/test/stacks/nuke/typescript/run.sh new file mode 100755 index 00000000000..649d8857ba1 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/run.sh @@ -0,0 +1,15 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 +#!/bin/sh + +# Get AWS account ID +AWS_ACCOUNT_ID=$(aws sts get-caller-identity --query Account --output text) +echo "AWS Account ID: $AWS_ACCOUNT_ID" + +# Copy the config file to /tmp and inject Account ID +echo "Copying & updating config file..." 
+cp /nuke_generic_config.yaml /tmp/nuke_config.yaml +sed -i "s/AWSACCOUNTID/$AWS_ACCOUNT_ID/g" /tmp/nuke_config.yaml + +echo "Running aws-nuke command:" +/usr/local/bin/aws-nuke run --config /tmp/nuke_config.yaml --force --max-wait-retries 10 --no-dry-run 2>&1 diff --git a/.tools/test/stacks/nuke/typescript/tsconfig.json b/.tools/test/stacks/nuke/typescript/tsconfig.json new file mode 100644 index 00000000000..464ed774ba8 --- /dev/null +++ b/.tools/test/stacks/nuke/typescript/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "ES2020", + "module": "commonjs", + "lib": ["es2020", "dom"], + "declaration": true, + "strict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "noImplicitThis": true, + "alwaysStrict": true, + "noUnusedLocals": false, + "noUnusedParameters": false, + "noImplicitReturns": true, + "noFallthroughCasesInSwitch": false, + "inlineSourceMap": true, + "inlineSources": true, + "experimentalDecorators": true, + "strictPropertyInitialization": false, + "typeRoots": ["./node_modules/@types"] + }, + "exclude": ["node_modules", "cdk.out"] +} diff --git a/.tools/test/stacks/plugin/typescript/plugin_stack.ts b/.tools/test/stacks/plugin/typescript/plugin_stack.ts index 42357ad94fa..c9ac012fe97 100644 --- a/.tools/test/stacks/plugin/typescript/plugin_stack.ts +++ b/.tools/test/stacks/plugin/typescript/plugin_stack.ts @@ -114,7 +114,7 @@ class PluginStack extends cdk.Stack { type: "FARGATE", subnets: vpc.selectSubnets().subnetIds, securityGroupIds: [sg.securityGroupId], - maxvCpus: 1, + maxvCpus: 256, }, } ); diff --git a/applications/feedback_sentiment_analyzer/cdk/package-lock.json b/applications/feedback_sentiment_analyzer/cdk/package-lock.json index e5b2152fdde..993b74645e1 100644 --- a/applications/feedback_sentiment_analyzer/cdk/package-lock.json +++ b/applications/feedback_sentiment_analyzer/cdk/package-lock.json @@ -8,7 +8,7 @@ "name": "cdk", "version": "0.1.0", "dependencies": { - "aws-cdk-lib": "^2.85.0", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.2.60", "source-map-support": "^0.5.21" }, @@ -41,19 +41,55 @@ } }, "node_modules/@aws-cdk/asset-awscli-v1": { - "version": "2.2.186", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.186.tgz", - "integrity": "sha512-2wSuOWQlrWc0AFuPCzXYn2Y8oK2vTfpNrVa8dxBxfswbwUrXMAirhpsP1f1J/4KEhA/4Hs4l27dKiC/IcDrvIQ==" + "version": "2.2.221", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.221.tgz", + "integrity": "sha512-+Vu2cMvgtkaHwNezrTVng4+FAMAWKJTkC/2ZQlgkbY05k0lHHK/2eWKqBhTeA7EpxVrx9uFN7GdBFz3mcThpxg==", + "license": "Apache-2.0" }, "node_modules/@aws-cdk/asset-kubectl-v20": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.1.tgz", - "integrity": "sha512-U1ntiX8XiMRRRH5J1IdC+1t5CE89015cwyt5U63Cpk0GnMlN5+h9WsWMlKlPXZR4rdq/m806JRlBMRpBUB2Dhw==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.3.tgz", + "integrity": "sha512-cDG1w3ieM6eOT9mTefRuTypk95+oyD7P5X/wRltwmYxU7nZc3+076YEVS6vrjDKr3ADYbfn0lDKpfB1FBtO9CQ==", + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz", + "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==", + "license": "Apache-2.0" + }, + 
"node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "39.2.9", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-39.2.9.tgz", + "integrity": "sha512-Ao4C8WoM5wgU4yn0aKLvI4gtgiRDa+8bVVwOlhGK9/jHmZlgMZY44UY9muq6qMKsMXTmfQeaB8LS3JLOiEUheA==", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "~1.4.1", + "semver": "^7.7.0" + } }, - "node_modules/@aws-cdk/asset-node-proxy-agent-v5": { - "version": "2.0.155", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v5/-/asset-node-proxy-agent-v5-2.0.155.tgz", - "integrity": "sha512-Q+Ny25hUPINlBbS6lmbUr4m6Tr6ToEJBla7sXA3FO3JUD0Z69ddcgbhuEBF8Rh1a2xmPONm89eX77kwK2fb4vQ==" + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.7.0", + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } }, "node_modules/@aws-crypto/crc32": { "version": "3.0.0", @@ -2439,9 +2475,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.85.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.85.0.tgz", - "integrity": "sha512-u+ypK8XEMRH3tGRMSmcbPYxLet7xBdGIztUkMcPtlNJGhS/vxqh12yYkem3g3zzmHwdX8OPLSnlZ2sIuiIqp/g==", + "version": "2.177.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.177.0.tgz", + "integrity": "sha512-nTnHAwjZaPJ5gfJjtzE/MyK6q0a66nWthoJl7l8srucRb+I30dczhbbXor6QCdVpJaTRAEliMOMq23aglsAQbg==", "bundleDependencies": [ "@balena/dockerignore", "case", @@ -2452,21 +2488,25 @@ "punycode", "semver", "table", - "yaml" + "yaml", + "mime-types" ], + "license": "Apache-2.0", "dependencies": { - "@aws-cdk/asset-awscli-v1": "^2.2.177", - "@aws-cdk/asset-kubectl-v20": "^2.1.1", - "@aws-cdk/asset-node-proxy-agent-v5": "^2.0.148", + "@aws-cdk/asset-awscli-v1": "^2.2.208", + "@aws-cdk/asset-kubectl-v20": "^2.1.3", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", + "@aws-cdk/cloud-assembly-schema": "^39.2.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", - "fs-extra": "^11.1.1", - "ignore": "^5.2.4", + "fs-extra": "^11.2.0", + "ignore": "^5.3.2", "jsonschema": "^1.4.1", + "mime-types": "^2.1.35", "minimatch": "^3.1.2", - "punycode": "^2.3.0", - "semver": "^7.5.1", - "table": "^6.8.1", + "punycode": "^2.3.1", + "semver": "^7.6.3", + "table": "^6.8.2", "yaml": "1.10.2" }, "engines": { @@ -2482,14 +2522,14 @@ "license": "Apache-2.0" }, "node_modules/aws-cdk-lib/node_modules/ajv": { - "version": "8.12.0", + "version": "8.17.1", "inBundle": true, "license": "MIT", "dependencies": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -2579,8 +2619,13 @@ "inBundle": true, "license": "MIT" }, + "node_modules/aws-cdk-lib/node_modules/fast-uri": { + "version": "3.0.3", + "inBundle": true, + "license": "BSD-3-Clause" + }, "node_modules/aws-cdk-lib/node_modules/fs-extra": { - "version": "11.1.1", + "version": "11.2.0", "inBundle": true, "license": "MIT", "dependencies": { @@ -2598,7 +2643,7 @@ "license": "ISC" }, "node_modules/aws-cdk-lib/node_modules/ignore": { - "version": "5.2.4", + "version": "5.3.2", "inBundle": true, "license": 
"MIT", "engines": { @@ -2642,15 +2687,23 @@ "inBundle": true, "license": "MIT" }, - "node_modules/aws-cdk-lib/node_modules/lru-cache": { - "version": "6.0.0", + "node_modules/aws-cdk-lib/node_modules/mime-db": { + "version": "1.52.0", "inBundle": true, - "license": "ISC", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/mime-types": { + "version": "2.1.35", + "inBundle": true, + "license": "MIT", "dependencies": { - "yallist": "^4.0.0" + "mime-db": "1.52.0" }, "engines": { - "node": ">=10" + "node": ">= 0.6" } }, "node_modules/aws-cdk-lib/node_modules/minimatch": { @@ -2665,7 +2718,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/punycode": { - "version": "2.3.0", + "version": "2.3.1", "inBundle": true, "license": "MIT", "engines": { @@ -2681,12 +2734,9 @@ } }, "node_modules/aws-cdk-lib/node_modules/semver": { - "version": "7.5.2", + "version": "7.6.3", "inBundle": true, "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -2735,7 +2785,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/table": { - "version": "6.8.1", + "version": "6.8.2", "inBundle": true, "license": "BSD-3-Clause", "dependencies": { @@ -2750,26 +2800,13 @@ } }, "node_modules/aws-cdk-lib/node_modules/universalify": { - "version": "2.0.0", + "version": "2.0.1", "inBundle": true, "license": "MIT", "engines": { "node": ">= 10.0.0" } }, - "node_modules/aws-cdk-lib/node_modules/uri-js": { - "version": "4.4.1", - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/aws-cdk-lib/node_modules/yallist": { - "version": "4.0.0", - "inBundle": true, - "license": "ISC" - }, "node_modules/aws-cdk-lib/node_modules/yaml": { "version": "1.10.2", "inBundle": true, @@ -5359,19 +5396,38 @@ } }, "@aws-cdk/asset-awscli-v1": { - "version": "2.2.186", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.186.tgz", - "integrity": "sha512-2wSuOWQlrWc0AFuPCzXYn2Y8oK2vTfpNrVa8dxBxfswbwUrXMAirhpsP1f1J/4KEhA/4Hs4l27dKiC/IcDrvIQ==" + "version": "2.2.221", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.221.tgz", + "integrity": "sha512-+Vu2cMvgtkaHwNezrTVng4+FAMAWKJTkC/2ZQlgkbY05k0lHHK/2eWKqBhTeA7EpxVrx9uFN7GdBFz3mcThpxg==" }, "@aws-cdk/asset-kubectl-v20": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.1.tgz", - "integrity": "sha512-U1ntiX8XiMRRRH5J1IdC+1t5CE89015cwyt5U63Cpk0GnMlN5+h9WsWMlKlPXZR4rdq/m806JRlBMRpBUB2Dhw==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.3.tgz", + "integrity": "sha512-cDG1w3ieM6eOT9mTefRuTypk95+oyD7P5X/wRltwmYxU7nZc3+076YEVS6vrjDKr3ADYbfn0lDKpfB1FBtO9CQ==" }, - "@aws-cdk/asset-node-proxy-agent-v5": { - "version": "2.0.155", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v5/-/asset-node-proxy-agent-v5-2.0.155.tgz", - "integrity": "sha512-Q+Ny25hUPINlBbS6lmbUr4m6Tr6ToEJBla7sXA3FO3JUD0Z69ddcgbhuEBF8Rh1a2xmPONm89eX77kwK2fb4vQ==" + "@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz", + "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==" + }, + "@aws-cdk/cloud-assembly-schema": { + "version": "39.2.9", + "resolved": 
"https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-39.2.9.tgz", + "integrity": "sha512-Ao4C8WoM5wgU4yn0aKLvI4gtgiRDa+8bVVwOlhGK9/jHmZlgMZY44UY9muq6qMKsMXTmfQeaB8LS3JLOiEUheA==", + "requires": { + "jsonschema": "~1.4.1", + "semver": "^7.7.0" + }, + "dependencies": { + "jsonschema": { + "version": "1.4.1", + "bundled": true + }, + "semver": { + "version": "7.7.0", + "bundled": true + } + } }, "@aws-crypto/crc32": { "version": "3.0.0", @@ -7343,22 +7399,24 @@ } }, "aws-cdk-lib": { - "version": "2.85.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.85.0.tgz", - "integrity": "sha512-u+ypK8XEMRH3tGRMSmcbPYxLet7xBdGIztUkMcPtlNJGhS/vxqh12yYkem3g3zzmHwdX8OPLSnlZ2sIuiIqp/g==", - "requires": { - "@aws-cdk/asset-awscli-v1": "^2.2.177", - "@aws-cdk/asset-kubectl-v20": "^2.1.1", - "@aws-cdk/asset-node-proxy-agent-v5": "^2.0.148", + "version": "2.177.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.177.0.tgz", + "integrity": "sha512-nTnHAwjZaPJ5gfJjtzE/MyK6q0a66nWthoJl7l8srucRb+I30dczhbbXor6QCdVpJaTRAEliMOMq23aglsAQbg==", + "requires": { + "@aws-cdk/asset-awscli-v1": "^2.2.208", + "@aws-cdk/asset-kubectl-v20": "^2.1.3", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", + "@aws-cdk/cloud-assembly-schema": "^39.2.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", - "fs-extra": "^11.1.1", - "ignore": "^5.2.4", + "fs-extra": "^11.2.0", + "ignore": "^5.3.2", "jsonschema": "^1.4.1", + "mime-types": "^2.1.35", "minimatch": "^3.1.2", - "punycode": "^2.3.0", - "semver": "^7.5.1", - "table": "^6.8.1", + "punycode": "^2.3.1", + "semver": "^7.6.3", + "table": "^6.8.2", "yaml": "1.10.2" }, "dependencies": { @@ -7367,13 +7425,13 @@ "bundled": true }, "ajv": { - "version": "8.12.0", + "version": "8.17.1", "bundled": true, "requires": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "require-from-string": "^2.0.2" } }, "ansi-regex": { @@ -7430,8 +7488,12 @@ "version": "3.1.3", "bundled": true }, + "fast-uri": { + "version": "3.0.3", + "bundled": true + }, "fs-extra": { - "version": "11.1.1", + "version": "11.2.0", "bundled": true, "requires": { "graceful-fs": "^4.2.0", @@ -7444,7 +7506,7 @@ "bundled": true }, "ignore": { - "version": "5.2.4", + "version": "5.3.2", "bundled": true }, "is-fullwidth-code-point": { @@ -7471,11 +7533,15 @@ "version": "4.4.2", "bundled": true }, - "lru-cache": { - "version": "6.0.0", + "mime-db": { + "version": "1.52.0", + "bundled": true + }, + "mime-types": { + "version": "2.1.35", "bundled": true, "requires": { - "yallist": "^4.0.0" + "mime-db": "1.52.0" } }, "minimatch": { @@ -7486,7 +7552,7 @@ } }, "punycode": { - "version": "2.3.0", + "version": "2.3.1", "bundled": true }, "require-from-string": { @@ -7494,11 +7560,8 @@ "bundled": true }, "semver": { - "version": "7.5.2", - "bundled": true, - "requires": { - "lru-cache": "^6.0.0" - } + "version": "7.6.3", + "bundled": true }, "slice-ansi": { "version": "4.0.0", @@ -7526,7 +7589,7 @@ } }, "table": { - "version": "6.8.1", + "version": "6.8.2", "bundled": true, "requires": { "ajv": "^8.0.1", @@ -7537,18 +7600,7 @@ } }, "universalify": { - "version": "2.0.0", - "bundled": true - }, - "uri-js": { - "version": "4.4.1", - "bundled": true, - "requires": { - "punycode": "^2.1.0" - } - }, - "yallist": { - "version": "4.0.0", + "version": "2.0.1", "bundled": true }, "yaml": { diff --git 
a/applications/feedback_sentiment_analyzer/cdk/package.json b/applications/feedback_sentiment_analyzer/cdk/package.json index 62213a5fe64..0da93c5cb1d 100644 --- a/applications/feedback_sentiment_analyzer/cdk/package.json +++ b/applications/feedback_sentiment_analyzer/cdk/package.json @@ -22,7 +22,7 @@ "typescript": "~5.0.4" }, "dependencies": { - "aws-cdk-lib": "^2.85.0", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.2.60", "source-map-support": "^0.5.21" } diff --git a/applications/feedback_sentiment_analyzer/client/package-lock.json b/applications/feedback_sentiment_analyzer/client/package-lock.json index 69247a2bd80..6db1c4f50c8 100644 --- a/applications/feedback_sentiment_analyzer/client/package-lock.json +++ b/applications/feedback_sentiment_analyzer/client/package-lock.json @@ -20,7 +20,7 @@ "@types/react-dom": "^18.2.4", "@vitejs/plugin-react": "^4.0.1", "typescript": "^5.1.3", - "vite": "^4.5.5" + "vite": "^4.5.9" } }, "node_modules/@ampproject/remapping": { @@ -1685,10 +1685,11 @@ } }, "node_modules/vite": { - "version": "4.5.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.5.tgz", - "integrity": "sha512-ifW3Lb2sMdX+WU91s3R0FyQlAyLxOzCSCP37ujw0+r5POeHPwe6udWVIElKQq8gk3t7b8rkmvqC6IHBpCff4GQ==", + "version": "4.5.9", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.9.tgz", + "integrity": "sha512-qK9W4xjgD3gXbC0NmdNFFnVFLMWSNiR3swj957yutwzzN16xF/E7nmtAyp1rT9hviDroQANjE4HK3H4WqWdFtw==", "dev": true, + "license": "MIT", "dependencies": { "esbuild": "^0.18.10", "postcss": "^8.4.27", @@ -2894,9 +2895,9 @@ "requires": {} }, "vite": { - "version": "4.5.5", - "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.5.tgz", - "integrity": "sha512-ifW3Lb2sMdX+WU91s3R0FyQlAyLxOzCSCP37ujw0+r5POeHPwe6udWVIElKQq8gk3t7b8rkmvqC6IHBpCff4GQ==", + "version": "4.5.9", + "resolved": "https://registry.npmjs.org/vite/-/vite-4.5.9.tgz", + "integrity": "sha512-qK9W4xjgD3gXbC0NmdNFFnVFLMWSNiR3swj957yutwzzN16xF/E7nmtAyp1rT9hviDroQANjE4HK3H4WqWdFtw==", "dev": true, "requires": { "esbuild": "^0.18.10", diff --git a/applications/feedback_sentiment_analyzer/client/package.json b/applications/feedback_sentiment_analyzer/client/package.json index 52e08a08e67..332c5a9ce7c 100644 --- a/applications/feedback_sentiment_analyzer/client/package.json +++ b/applications/feedback_sentiment_analyzer/client/package.json @@ -21,6 +21,6 @@ "@types/react-dom": "^18.2.4", "@vitejs/plugin-react": "^4.0.1", "typescript": "^5.1.3", - "vite": "^4.5.5" + "vite": "^4.5.9" } } diff --git a/applications/photo-asset-manager/cdk/package-lock.json b/applications/photo-asset-manager/cdk/package-lock.json index bb25e68274e..3effaf325bf 100644 --- a/applications/photo-asset-manager/cdk/package-lock.json +++ b/applications/photo-asset-manager/cdk/package-lock.json @@ -10,7 +10,7 @@ "dependencies": { "@aws-cdk/aws-cloudformation": "^1.196.0", "@aws-sdk/client-cloudformation": "^3.621.0", - "aws-cdk-lib": "^2.82.0", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.0.0" }, "bin": { @@ -24,19 +24,22 @@ } }, "node_modules/@aws-cdk/asset-awscli-v1": { - "version": "2.2.185", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.185.tgz", - "integrity": "sha512-cost0pu5nsmQmFhVxN4OonThGhgQeSlwntdXsEi5v8buVg+X4MzcXemmmSZxkkzzFCoS0r4w/7BiX1e+mMkFVA==" + "version": "2.2.222", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.222.tgz", + "integrity": "sha512-9qjd91FwBYmxjfF3ckieTKrmmvIBZdSe1Daf/hRGxAPnhtH9Fm5Y3Oi0dJD2tRw0ufyM6AbvX9zgejcTqXc+LQ==", + "license": 
"Apache-2.0" }, "node_modules/@aws-cdk/asset-kubectl-v20": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.1.tgz", - "integrity": "sha512-U1ntiX8XiMRRRH5J1IdC+1t5CE89015cwyt5U63Cpk0GnMlN5+h9WsWMlKlPXZR4rdq/m806JRlBMRpBUB2Dhw==" + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.3.tgz", + "integrity": "sha512-cDG1w3ieM6eOT9mTefRuTypk95+oyD7P5X/wRltwmYxU7nZc3+076YEVS6vrjDKr3ADYbfn0lDKpfB1FBtO9CQ==", + "license": "Apache-2.0" }, - "node_modules/@aws-cdk/asset-node-proxy-agent-v5": { - "version": "2.0.155", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v5/-/asset-node-proxy-agent-v5-2.0.155.tgz", - "integrity": "sha512-Q+Ny25hUPINlBbS6lmbUr4m6Tr6ToEJBla7sXA3FO3JUD0Z69ddcgbhuEBF8Rh1a2xmPONm89eX77kwK2fb4vQ==" + "node_modules/@aws-cdk/asset-node-proxy-agent-v6": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz", + "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==", + "license": "Apache-2.0" }, "node_modules/@aws-cdk/assets": { "version": "1.196.0", @@ -2232,9 +2235,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.82.0", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.82.0.tgz", - "integrity": "sha512-icLhHvoxxo5mu9z8oplSHF+A7scbRiXYoRp2hyFkYSCoY9H+eBeIVXKA2S5YPpJfJO4SeORbCQnsyXBbz31XXw==", + "version": "2.177.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.177.0.tgz", + "integrity": "sha512-nTnHAwjZaPJ5gfJjtzE/MyK6q0a66nWthoJl7l8srucRb+I30dczhbbXor6QCdVpJaTRAEliMOMq23aglsAQbg==", "bundleDependencies": [ "@balena/dockerignore", "case", @@ -2245,21 +2248,25 @@ "punycode", "semver", "table", - "yaml" + "yaml", + "mime-types" ], + "license": "Apache-2.0", "dependencies": { - "@aws-cdk/asset-awscli-v1": "^2.2.177", - "@aws-cdk/asset-kubectl-v20": "^2.1.1", - "@aws-cdk/asset-node-proxy-agent-v5": "^2.0.148", + "@aws-cdk/asset-awscli-v1": "^2.2.208", + "@aws-cdk/asset-kubectl-v20": "^2.1.3", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", + "@aws-cdk/cloud-assembly-schema": "^39.2.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", - "fs-extra": "^11.1.1", - "ignore": "^5.2.4", + "fs-extra": "^11.2.0", + "ignore": "^5.3.2", "jsonschema": "^1.4.1", + "mime-types": "^2.1.35", "minimatch": "^3.1.2", - "punycode": "^2.3.0", - "semver": "^7.5.1", - "table": "^6.8.1", + "punycode": "^2.3.1", + "semver": "^7.6.3", + "table": "^6.8.2", "yaml": "1.10.2" }, "engines": { @@ -2269,20 +2276,53 @@ "constructs": "^10.0.0" } }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "39.2.15", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-39.2.15.tgz", + "integrity": "sha512-roeUKO5QR9JLnNEULg0RiS1ac6PZ9qsPaOcAJXCP0D1NLLECdxwwqJvLbhV91pCWrGTeWY5OhLtlL5OPS6Ycvg==", + "bundleDependencies": [ + "jsonschema", + "semver" + ], + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "~1.4.1", + "semver": "^7.7.1" + } + }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/aws-cdk-lib/node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.7.1", + "inBundle": true, + "license": "ISC", 
+ "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/aws-cdk-lib/node_modules/@balena/dockerignore": { "version": "1.0.2", "inBundle": true, "license": "Apache-2.0" }, "node_modules/aws-cdk-lib/node_modules/ajv": { - "version": "8.12.0", + "version": "8.17.1", "inBundle": true, "license": "MIT", "dependencies": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -2372,8 +2412,13 @@ "inBundle": true, "license": "MIT" }, + "node_modules/aws-cdk-lib/node_modules/fast-uri": { + "version": "3.0.3", + "inBundle": true, + "license": "BSD-3-Clause" + }, "node_modules/aws-cdk-lib/node_modules/fs-extra": { - "version": "11.1.1", + "version": "11.2.0", "inBundle": true, "license": "MIT", "dependencies": { @@ -2391,7 +2436,7 @@ "license": "ISC" }, "node_modules/aws-cdk-lib/node_modules/ignore": { - "version": "5.2.4", + "version": "5.3.2", "inBundle": true, "license": "MIT", "engines": { @@ -2435,15 +2480,23 @@ "inBundle": true, "license": "MIT" }, - "node_modules/aws-cdk-lib/node_modules/lru-cache": { - "version": "6.0.0", + "node_modules/aws-cdk-lib/node_modules/mime-db": { + "version": "1.52.0", "inBundle": true, - "license": "ISC", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/aws-cdk-lib/node_modules/mime-types": { + "version": "2.1.35", + "inBundle": true, + "license": "MIT", "dependencies": { - "yallist": "^4.0.0" + "mime-db": "1.52.0" }, "engines": { - "node": ">=10" + "node": ">= 0.6" } }, "node_modules/aws-cdk-lib/node_modules/minimatch": { @@ -2458,7 +2511,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/punycode": { - "version": "2.3.0", + "version": "2.3.1", "inBundle": true, "license": "MIT", "engines": { @@ -2474,12 +2527,9 @@ } }, "node_modules/aws-cdk-lib/node_modules/semver": { - "version": "7.5.1", + "version": "7.6.3", "inBundle": true, "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -2528,7 +2578,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/table": { - "version": "6.8.1", + "version": "6.8.2", "inBundle": true, "license": "BSD-3-Clause", "dependencies": { @@ -2543,26 +2593,13 @@ } }, "node_modules/aws-cdk-lib/node_modules/universalify": { - "version": "2.0.0", + "version": "2.0.1", "inBundle": true, "license": "MIT", "engines": { "node": ">= 10.0.0" } }, - "node_modules/aws-cdk-lib/node_modules/uri-js": { - "version": "4.4.1", - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/aws-cdk-lib/node_modules/yallist": { - "version": "4.0.0", - "inBundle": true, - "license": "ISC" - }, "node_modules/aws-cdk-lib/node_modules/yaml": { "version": "1.10.2", "inBundle": true, diff --git a/applications/photo-asset-manager/cdk/package.json b/applications/photo-asset-manager/cdk/package.json index c0903aacfd2..6d44900e986 100644 --- a/applications/photo-asset-manager/cdk/package.json +++ b/applications/photo-asset-manager/cdk/package.json @@ -18,7 +18,7 @@ "dependencies": { "@aws-cdk/aws-cloudformation": "^1.196.0", "@aws-sdk/client-cloudformation": "^3.621.0", - "aws-cdk-lib": "^2.82.0", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.0.0" } } diff --git a/aws-cli/bash-linux/iam/README.md b/aws-cli/bash-linux/iam/README.md index 19baf85864c..a204208e13e 100644 --- 
a/aws-cli/bash-linux/iam/README.md +++ b/aws-cli/bash-linux/iam/README.md @@ -45,14 +45,15 @@ Code excerpts that show you how to call individual service functions. - [CreatePolicy](iam_operations.sh#L421) - [CreateRole](iam_operations.sh#L342) - [CreateUser](iam_operations.sh#L113) -- [DeleteAccessKey](iam_operations.sh#L787) +- [DeleteAccessKey](iam_operations.sh#L904) - [DeletePolicy](iam_operations.sh#L646) - [DeleteRole](iam_operations.sh#L716) -- [DeleteUser](iam_operations.sh#L868) +- [DeleteUser](iam_operations.sh#L985) - [DetachRolePolicy](iam_operations.sh#L571) - [GetUser](iam_operations.sh#L17) - [ListAccessKeys](iam_operations.sh#L273) - [ListUsers](iam_operations.sh#L56) +- [UpdateAccessKey](iam_operations.sh#L787) @@ -110,4 +111,4 @@ in the `aws-cli` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/aws-cli/bash-linux/iam/iam_create_user_assume_role_scenario.sh b/aws-cli/bash-linux/iam/iam_create_user_assume_role_scenario.sh index d630c862708..2cb96f9d3fd 100755 --- a/aws-cli/bash-linux/iam/iam_create_user_assume_role_scenario.sh +++ b/aws-cli/bash-linux/iam/iam_create_user_assume_role_scenario.sh @@ -351,6 +351,15 @@ function clean_up() { fi fi + if [ -n "$access_key_name" ]; then + if (iam_update_access_key -u "$user_name" -k "$access_key_name" -d); then + echo "Deactivated access key $access_key_name" + else + errecho "The access key failed to deactivate." + result=1 + fi + fi + if [ -n "$access_key_name" ]; then if (iam_delete_access_key -u "$user_name" -k "$access_key_name"); then echo "Deleted access key $access_key_name" diff --git a/aws-cli/bash-linux/iam/iam_operations.sh b/aws-cli/bash-linux/iam/iam_operations.sh index 73a0e03db08..ac375b1c00c 100644 --- a/aws-cli/bash-linux/iam/iam_operations.sh +++ b/aws-cli/bash-linux/iam/iam_operations.sh @@ -133,7 +133,7 @@ function iam_create_user() { # bashsupport disable=BP5008 function usage() { echo "function iam_create_user" - echo "Creates an WS Identity and Access Management (IAM) user. You must supply a username:" + echo "Creates an AWS Identity and Access Management (IAM) user. You must supply a username:" echo " -u user_name The name of the user. It must be unique within the account." echo "" } @@ -663,7 +663,7 @@ function iam_delete_policy() { # bashsupport disable=BP5008 function usage() { echo "function iam_delete_policy" - echo "Deletes an WS Identity and Access Management (IAM) policy" + echo "Deletes an AWS Identity and Access Management (IAM) policy" echo " -n policy_arn -- The name of the IAM policy arn." echo "" } @@ -733,7 +733,7 @@ function iam_delete_role() { # bashsupport disable=BP5008 function usage() { echo "function iam_delete_role" - echo "Deletes an WS Identity and Access Management (IAM) role" + echo "Deletes an AWS Identity and Access Management (IAM) role" echo " -n role_name -- The name of the IAM role." echo "" } @@ -784,6 +784,123 @@ function iam_delete_role() { } # snippet-end:[aws-cli.bash-linux.iam.DeleteRole] +# snippet-start:[aws-cli.bash-linux.iam.UpdateAccessKey] +############################################################################### +# function iam_update_access_key +# +# This function can activate or deactivate an IAM access key for the specified IAM user. +# +# Parameters: +# -u user_name -- The name of the user. +# -k access_key -- The access key to update. +# -a -- Activate the selected access key. 
+# -d -- Deactivate the selected access key. +# +# Example: +# # To deactivate the selected access key for IAM user Bob +# iam_update_access_key -u Bob -k AKIAIOSFODNN7EXAMPLE -d +# +# Returns: +# 0 - If successful. +# 1 - If it fails. +############################################################################### +function iam_update_access_key() { + local user_name access_key status response + local option OPTARG # Required to use getopts command in a function. + local activate_flag=false deactivate_flag=false + + # bashsupport disable=BP5008 + function usage() { + echo "function iam_update_access_key" + echo "Updates the status of an AWS Identity and Access Management (IAM) access key for the specified IAM user" + echo " -u user_name The name of the user." + echo " -k access_key The access key to update." + echo " -a Activate the access key." + echo " -d Deactivate the access key." + echo "" + } + + # Retrieve the calling parameters. + while getopts "u:k:adh" option; do + case "${option}" in + u) user_name="${OPTARG}" ;; + k) access_key="${OPTARG}" ;; + a) activate_flag=true ;; + d) deactivate_flag=true ;; + h) + usage + return 0 + ;; + \?) + echo "Invalid parameter" + usage + return 1 + ;; + esac + done + export OPTIND=1 + + # Validate input parameters + if [[ -z "$user_name" ]]; then + errecho "ERROR: You must provide a username with the -u parameter." + usage + return 1 + fi + + if [[ -z "$access_key" ]]; then + errecho "ERROR: You must provide an access key with the -k parameter." + usage + return 1 + fi + + # Ensure that only -a or -d is specified + if [[ "$activate_flag" == true && "$deactivate_flag" == true ]]; then + errecho "ERROR: You cannot specify both -a (activate) and -d (deactivate) at the same time." + usage + return 1 + fi + + # If neither -a nor -d is provided, return an error + if [[ "$activate_flag" == false && "$deactivate_flag" == false ]]; then + errecho "ERROR: You must specify either -a (activate) or -d (deactivate)." + usage + return 1 + fi + + # Determine the status based on the flag + if [[ "$activate_flag" == true ]]; then + status="Active" + elif [[ "$deactivate_flag" == true ]]; then + status="Inactive" + fi + + iecho "Parameters:\n" + iecho " Username: $user_name" + iecho " Access key: $access_key" + iecho " New status: $status" + iecho "" + + # Update the access key status + response=$(aws iam update-access-key \ + --user-name "$user_name" \ + --access-key-id "$access_key" \ + --status "$status" 2>&1) + + local error_code=${?} + + if [[ $error_code -ne 0 ]]; then + aws_cli_error_log $error_code + errecho "ERROR: AWS reports update-access-key operation failed.\n$response" + return 1 + fi + + iecho "update-access-key response: $response" + iecho + + return 0 +} +# snippet-end:[aws-cli.bash-linux.iam.UpdateAccessKey] + # snippet-start:[aws-cli.bash-linux.iam.DeleteAccessKey] ############################################################################### # function iam_delete_access_key @@ -805,7 +922,7 @@ function iam_delete_access_key() { # bashsupport disable=BP5008 function usage() { echo "function iam_delete_access_key" - echo "Deletes an WS Identity and Access Management (IAM) access key for the specified IAM user" + echo "Deletes an AWS Identity and Access Management (IAM) access key for the specified IAM user" echo " -u user_name The name of the user." echo " -k access_key The access key to delete." 
echo "" @@ -885,7 +1002,7 @@ function iam_delete_user() { # bashsupport disable=BP5008 function usage() { echo "function iam_delete_user" - echo "Deletes an WS Identity and Access Management (IAM) user. You must supply a username:" + echo "Deletes an AWS Identity and Access Management (IAM) user. You must supply a username:" echo " -u user_name The name of the user." echo "" } diff --git a/cpp/example_code/iot/README.md b/cpp/example_code/iot/README.md index 9839fed6638..9497e020860 100644 --- a/cpp/example_code/iot/README.md +++ b/cpp/example_code/iot/README.md @@ -99,8 +99,19 @@ This example shows you how to get started using AWS IoT. #### Learn the basics -This example shows you how to work with AWS IoT device management. - +This example shows you how to do the following: + +- Create an AWS IoT Thing. +- Generate a device certificate. +- Update an AWS IoT Thing with Attributes. +- Return a unique endpoint. +- List your AWS IoT certificates. +- Create an AWS IoT shadow. +- Write out state information. +- Creates a rule. +- List your rules. +- Search things using the Thing name. +- Delete an AWS IoT Thing. @@ -140,4 +151,4 @@ This example shows you how to work with AWS IoT device management. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/Bedrock-runtime/.gitignore b/dotnetv3/Bedrock-runtime/.gitignore index ba964e2a8e7..98ae3975334 100644 --- a/dotnetv3/Bedrock-runtime/.gitignore +++ b/dotnetv3/Bedrock-runtime/.gitignore @@ -1,2 +1,3 @@ /.vs/ /Tools/ +**/generated-images/ diff --git a/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln b/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln index f495e60e9b1..dd290cac66d 100644 --- a/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln +++ b/dotnetv3/Bedrock-runtime/BedrockRuntimeExamples.sln @@ -94,6 +94,26 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStre EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "InvokeModelWithResponseStream", "Models\AnthropicClaude\InvokeModelWithResponseStream\InvokeModelWithResponseStream.csproj", "{C75F2BBE-7C84-4B01-9836-7279DAE41499}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNovaText", "AmazonNovaText", "{02EA681E-C7D8-13C7-8484-4AC65E1B71E8}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNova", "AmazonNova", "{3AF63EC9-2EB0-4A0B-8C3B-0CA3595080F6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Models\AmazonNova\AmazonNovaText\Converse\Converse.csproj", "{2E4C9BFE-C49C-0567-D73C-F2915AB62CA6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Models\AmazonNova\AmazonNovaText\ConverseStream\ConverseStream.csproj", "{E144492A-337A-0755-EAB4-DA083C3A2DDB}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNovaCanvas", "AmazonNovaCanvas", "{4D3E429C-CCAE-42DE-A062-4717E71D8403}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Models\AmazonNova\AmazonNovaCanvas\InvokeModel\InvokeModel.csproj", "{2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Actions", "Actions", "{FDC95D1E-41C6-45A5-BF29-F76FCC3DAEF9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "BedrockRuntimeActions", "Actions\BedrockRuntimeActions.csproj", "{ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC}" +EndProject 
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scenarios", "Scenarios", "{045D214B-6181-43B0-ABFE-246675F4D967}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseToolScenario", "Scenarios\ConverseToolScenario\ConverseToolScenario.csproj", "{C0A5B872-03F5-4865-9349-7A403591C50E}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -200,6 +220,26 @@ Global {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Debug|Any CPU.Build.0 = Debug|Any CPU {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Release|Any CPU.ActiveCfg = Release|Any CPU {C75F2BBE-7C84-4B01-9836-7279DAE41499}.Release|Any CPU.Build.0 = Release|Any CPU + {2E4C9BFE-C49C-0567-D73C-F2915AB62CA6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2E4C9BFE-C49C-0567-D73C-F2915AB62CA6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2E4C9BFE-C49C-0567-D73C-F2915AB62CA6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2E4C9BFE-C49C-0567-D73C-F2915AB62CA6}.Release|Any CPU.Build.0 = Release|Any CPU + {E144492A-337A-0755-EAB4-DA083C3A2DDB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E144492A-337A-0755-EAB4-DA083C3A2DDB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E144492A-337A-0755-EAB4-DA083C3A2DDB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E144492A-337A-0755-EAB4-DA083C3A2DDB}.Release|Any CPU.Build.0 = Release|Any CPU + {2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D}.Release|Any CPU.Build.0 = Release|Any CPU + {ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC}.Release|Any CPU.Build.0 = Release|Any CPU + {C0A5B872-03F5-4865-9349-7A403591C50E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0A5B872-03F5-4865-9349-7A403591C50E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0A5B872-03F5-4865-9349-7A403591C50E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0A5B872-03F5-4865-9349-7A403591C50E}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -227,6 +267,7 @@ Global {3D6441FC-0FE8-4D0C-910D-3D9310599C71} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1} {D1B0719F-4F84-4DBC-BCAD-E856FB3193D7} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646} {1E62D4FB-CC59-4F1E-BB22-574CEC08C94B} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B} + {B753CEB9-EA53-4AE1-997E-B7D54A299D58} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1} {2A6989CB-B273-4841-BD3E-7B1BBA4DD25F} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C} {BCC66C37-4980-484F-819D-066D2FF2669C} = {EF45C0B9-ED76-4B7A-A0A7-F102E979B71C} {52CDA3F4-F090-4224-978A-5F42388DCF92} = {3F96ECB4-1644-43E8-8643-2CDCF9E679F1} @@ -235,6 +276,14 @@ Global {4B5A00D6-B9F1-449F-A9D2-80E860D6BD75} = {65504C76-7E32-4A12-A42E-BCDA4FE79BC1} {EFC7D088-EF45-464B-97CD-0BBA486B224A} = {BBB79D3E-5DF2-4FF6-B467-52D0EEB91C4B} {C75F2BBE-7C84-4B01-9836-7279DAE41499} = {8BAC2322-AD3C-484A-B51D-8263BC4E6646} + {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} = {3AF63EC9-2EB0-4A0B-8C3B-0CA3595080F6} + {3AF63EC9-2EB0-4A0B-8C3B-0CA3595080F6} = {41B69207-8F29-41BC-9114-78EE740485C8} + {2E4C9BFE-C49C-0567-D73C-F2915AB62CA6} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + 
{E144492A-337A-0755-EAB4-DA083C3A2DDB} = {02EA681E-C7D8-13C7-8484-4AC65E1B71E8} + {4D3E429C-CCAE-42DE-A062-4717E71D8403} = {3AF63EC9-2EB0-4A0B-8C3B-0CA3595080F6} + {2B39D4E2-C6B6-4340-A9AD-5F5C25CA8C1D} = {4D3E429C-CCAE-42DE-A062-4717E71D8403} + {ABA0C307-C7A1-4BBE-A7E2-4BA7163559FC} = {FDC95D1E-41C6-45A5-BF29-F76FCC3DAEF9} + {C0A5B872-03F5-4865-9349-7A403591C50E} = {045D214B-6181-43B0-ABFE-246675F4D967} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {E48A5088-1BBB-4A8B-9AB2-CC5CE0482466} diff --git a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj index 8475494e76e..c26f412667b 100644 --- a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj index bf2403af903..986018da574 100644 --- a/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.cs b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.cs new file mode 100644 index 00000000000..6db2f957aef --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.cs @@ -0,0 +1,128 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[BedrockRuntime.dotnetv3.InvokeModel_AmazonNovaImageGeneration] +// Use the native inference API to create an image with Amazon Nova Canvas. + +using System; +using System.IO; +using System.Text.Json; +using System.Text.Json.Nodes; +using Amazon; +using Amazon.BedrockRuntime; +using Amazon.BedrockRuntime.Model; + +// Create a Bedrock Runtime client in the AWS Region you want to use. +var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1); + +// Set the model ID. +var modelId = "amazon.nova-canvas-v1:0"; + +// Define the image generation prompt for the model. +var prompt = "A stylized picture of a cute old steampunk robot."; + +// Create a random seed between 0 and 858,993,459 +int seed = new Random().Next(0, 858993460); + +//Format the request payload using the model's native structure. +var nativeRequest = JsonSerializer.Serialize(new +{ + taskType = "TEXT_IMAGE", + textToImageParams = new + { + text = prompt + }, + imageGenerationConfig = new + { + seed, + quality = "standard", + width = 512, + height = 512, + numberOfImages = 1 + } +}); + +// Create a request with the model ID and the model's native request payload. +var request = new InvokeModelRequest() +{ + ModelId = modelId, + Body = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(nativeRequest)), + ContentType = "application/json" +}; + +try +{ + // Send the request to the Bedrock Runtime and wait for the response. + var response = await client.InvokeModelAsync(request); + + // Decode the response body. + var modelResponse = await JsonNode.ParseAsync(response.Body); + + // Extract the image data. + var base64Image = modelResponse["images"]?[0].ToString() ?? 
""; + + // Save the image in a local folder + string savedPath = AmazonNovaCanvas.InvokeModel.SaveBase64Image(base64Image); + Console.WriteLine($"Image saved to: {savedPath}"); +} +catch (AmazonBedrockRuntimeException e) +{ + Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}"); + throw; +} + +// snippet-end:[BedrockRuntime.dotnetv3.InvokeModel_AmazonNovaImageGeneration] + +// Create a partial class to make the top-level script testable. +namespace AmazonNovaCanvas +{ + public partial class InvokeModel + { + public static string SaveBase64Image(string base64String, string outputFolderName = "generated-images") + { + // Get the directory where the script is located + string scriptDirectory = AppDomain.CurrentDomain.BaseDirectory; + + // Navigate to the script's folder + if (scriptDirectory.Contains("bin")) + { + scriptDirectory = Directory.GetParent(scriptDirectory)?.Parent?.Parent?.Parent?.FullName + ?? throw new DirectoryNotFoundException("Could not find script directory"); + } + + // Combine script directory with output folder + string outputPath = Path.Combine(scriptDirectory, outputFolderName); + + // Create directory if it doesn't exist + if (!Directory.Exists(outputPath)) + { + Directory.CreateDirectory(outputPath); + } + + // Remove base64 header if present (e.g., "data:image/jpeg;base64,") + string base64Data = base64String; + if (base64String.Contains(",")) + { + base64Data = base64String.Split(',')[1]; + } + + // Convert base64 to bytes + byte[] imageBytes = Convert.FromBase64String(base64Data); + + // Find the next available number + int fileNumber = 1; + string filePath; + do + { + string paddedNumber = fileNumber.ToString("D2"); // Pads with leading zero + filePath = Path.Combine(outputPath, $"image_{paddedNumber}.jpg"); + fileNumber++; + } while (File.Exists(filePath)); + + // Save the image + File.WriteAllBytes(filePath, imageBytes); + + return filePath; + } + } +} \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.csproj new file mode 100644 index 00000000000..0db5411af0b --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.csproj @@ -0,0 +1,12 @@ + + + Exe + net8.0 + AmazonNovaCanvas.$(MSBuildProjectName) + + + + + + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.cs b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.cs new file mode 100644 index 00000000000..46466fec1d2 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.cs @@ -0,0 +1,60 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[BedrockRuntime.dotnetv3.Converse_AmazonNovaText] +// Use the Converse API to send a text message to Amazon Nova. + +using System; +using System.Collections.Generic; +using Amazon; +using Amazon.BedrockRuntime; +using Amazon.BedrockRuntime.Model; + +// Create a Bedrock Runtime client in the AWS Region you want to use. +var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1); + +// Set the model ID, e.g., Amazon Nova Lite. +var modelId = "amazon.nova-lite-v1:0"; + +// Define the user message. 
+var userMessage = "Describe the purpose of a 'hello world' program in one line."; + +// Create a request with the model ID, the user message, and an inference configuration. +var request = new ConverseRequest +{ + ModelId = modelId, + Messages = new List<Message> + { + new Message + { + Role = ConversationRole.User, + Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } } + } + }, + InferenceConfig = new InferenceConfiguration() + { + MaxTokens = 512, + Temperature = 0.5F, + TopP = 0.9F + } +}; + +try +{ + // Send the request to the Bedrock Runtime and wait for the result. + var response = await client.ConverseAsync(request); + + // Extract and print the response text. + string responseText = response?.Output?.Message?.Content?[0]?.Text ?? ""; + Console.WriteLine(responseText); +} +catch (AmazonBedrockRuntimeException e) +{ + Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}"); + throw; +} + +// snippet-end:[BedrockRuntime.dotnetv3.Converse_AmazonNovaText] + +// Create a partial class to make the top-level script testable. +namespace AmazonNovaText { public partial class Converse { } } \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.csproj new file mode 100644 index 00000000000..5fa769392db --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/Converse/Converse.csproj @@ -0,0 +1,12 @@ + + + Exe + net8.0 + AmazonNovaText.$(MSBuildProjectName) + + + + + + + diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.cs b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.cs new file mode 100644 index 00000000000..69ff7825dd9 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.cs @@ -0,0 +1,67 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[BedrockRuntime.dotnetv3.ConverseStream_AmazonNovaText] +// Use the Converse API to send a text message to Amazon Nova +// and print the response stream. + +using System; +using System.Collections.Generic; +using System.Linq; +using Amazon; +using Amazon.BedrockRuntime; +using Amazon.BedrockRuntime.Model; + +// Create a Bedrock Runtime client in the AWS Region you want to use. +var client = new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1); + +// Set the model ID, e.g., Amazon Nova Lite. +var modelId = "amazon.nova-lite-v1:0"; + +// Define the user message. +var userMessage = "Describe the purpose of a 'hello world' program in one line."; + +// Create a request with the model ID, the user message, and an inference configuration. +var request = new ConverseStreamRequest +{ + ModelId = modelId, + Messages = new List<Message> + { + new Message + { + Role = ConversationRole.User, + Content = new List<ContentBlock> { new ContentBlock { Text = userMessage } } + } + }, + InferenceConfig = new InferenceConfiguration() + { + MaxTokens = 512, + Temperature = 0.5F, + TopP = 0.9F + } +}; + +try +{ + // Send the request to the Bedrock Runtime and wait for the result. + var response = await client.ConverseStreamAsync(request); + + // Extract and print the streamed response text in real-time. 
+ foreach (var chunk in response.Stream.AsEnumerable()) + { + if (chunk is ContentBlockDeltaEvent) + { + Console.Write((chunk as ContentBlockDeltaEvent).Delta.Text); + } + } +} +catch (AmazonBedrockRuntimeException e) +{ + Console.WriteLine($"ERROR: Can't invoke '{modelId}'. Reason: {e.Message}"); + throw; +} + +// snippet-end:[BedrockRuntime.dotnetv3.ConverseStream_AmazonNovaText] + +// Create a partial class to make the top-level script testable. +namespace AmazonNovaText { public partial class ConverseStream { } } \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.csproj new file mode 100644 index 00000000000..1260dc1d435 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.csproj @@ -0,0 +1,12 @@ + + + Exe + net8.0 + AmazonNovaText.$(MSBuildProjectName) + + + + + + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj index e505af96607..3651f4be200 100644 --- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj index 5752f31c880..662bf35f3e1 100644 --- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj index 5752f31c880..662bf35f3e1 100644 --- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModel/InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj index 5752f31c880..662bf35f3e1 100644 --- a/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AmazonTitanText/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj index 7f752984648..9a843cc1582 100644 --- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj index e4e6c3bb250..72b5e19f0b8 100644 --- 
a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj index e4e6c3bb250..72b5e19f0b8 100644 --- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModel/InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj index 0daad35f8b9..6e4dbdf489b 100644 --- a/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/AnthropicClaude/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj index 402f8c682cb..b79f0fd7312 100644 --- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModel/Command_InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj index 402f8c682cb..b79f0fd7312 100644 --- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_InvokeModelWithResponseStream/Command_InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj index 402f8c682cb..b79f0fd7312 100644 --- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModel/Command_R_InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj index 402f8c682cb..b79f0fd7312 100644 --- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Command_R_InvokeModelWithResponseStream/Command_R_InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj index 402f8c682cb..b79f0fd7312 100644 --- 
a/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj index 402f8c682cb..b79f0fd7312 100644 --- a/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/CohereCommand/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj index f91317c7fa6..6163a7486a7 100644 --- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj index f91317c7fa6..6163a7486a7 100644 --- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj index f91317c7fa6..6163a7486a7 100644 --- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModel/Llama3_InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj index f91317c7fa6..6163a7486a7 100644 --- a/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/MetaLlama/Llama3_InvokeModelWithResponseStream/Llama3_InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj index 27e936ccbc6..dfbd70d9fc3 100644 --- a/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Mistral/Converse/Converse.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj index 8297baab449..9f570ade55e 100644 --- a/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Mistral/ConverseStream/ConverseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj index 8297baab449..9f570ade55e 100644 --- a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModel/InvokeModel.csproj @@ -6,7 +6,7 @@ - - + + diff --git 
a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj index 8297baab449..9f570ade55e 100644 --- a/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj +++ b/dotnetv3/Bedrock-runtime/Models/Mistral/InvokeModelWithResponseStream/InvokeModelWithResponseStream.csproj @@ -6,7 +6,7 @@ - - + + diff --git a/dotnetv3/Bedrock-runtime/README.md b/dotnetv3/Bedrock-runtime/README.md index 9e153544bb9..174ab4345b8 100644 --- a/dotnetv3/Bedrock-runtime/README.md +++ b/dotnetv3/Bedrock-runtime/README.md @@ -28,11 +28,28 @@ For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv3 +### Scenarios + +Code examples that show you how to accomplish a specific task by calling multiple +functions within the same service. + +- [Tool use with the Converse API](Scenarios/ConverseToolScenario/ConverseToolScenario.cs) + ### AI21 Labs Jurassic-2 - [Converse](Models/Ai21LabsJurassic2/Converse/Converse.cs#L4) - [InvokeModel](Models/Ai21LabsJurassic2/InvokeModel/InvokeModel.cs#L4) +### Amazon Nova + +- [Converse](Models/AmazonNova/AmazonNovaText/Converse/Converse.cs#L4) +- [ConverseStream](Models/AmazonNova/AmazonNovaText/ConverseStream/ConverseStream.cs#L4) +- [Scenario: Tool use with the Converse API](Scenarios/ConverseToolScenario/ConverseToolScenario.cs#L4) + +### Amazon Nova Canvas + +- [InvokeModel](Models/AmazonNova/AmazonNovaCanvas/InvokeModel/InvokeModel.cs#L4) + ### Amazon Titan Text - [Converse](Models/AmazonTitanText/Converse/Converse.cs#L4) @@ -101,6 +118,18 @@ Alternatively, you can run the example from within your IDE. +#### Tool use with the Converse API + +This example shows you how to build a typical interaction between an application, a generative AI model, and connected tools or APIs to mediate interactions between the AI and the outside world. It uses the example of connecting an external weather API to the AI model so it can provide real-time weather information based on user input. + + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. @@ -127,4 +156,4 @@ in the `dotnetv3` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/BedrockActionsWrapper.cs b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/BedrockActionsWrapper.cs new file mode 100644 index 00000000000..af2d1859e16 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/BedrockActionsWrapper.cs @@ -0,0 +1,82 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +using Amazon.BedrockRuntime; +using Amazon.BedrockRuntime.Model; +using Microsoft.Extensions.Logging; + +namespace ConverseToolScenario; + +// snippet-start:[Bedrock.ConverseTool.dotnetv3.SendConverseRequest] + +/// +/// Wrapper class for interacting with the Amazon Bedrock Converse API. +/// +public class BedrockActionsWrapper +{ + private readonly IAmazonBedrockRuntime _bedrockClient; + private readonly ILogger _logger; + + /// + /// Initializes a new instance of the class. + /// + /// The Bedrock Converse API client. + /// The logger instance. 
+ public BedrockActionsWrapper(IAmazonBedrockRuntime bedrockClient, ILogger logger) + { + _bedrockClient = bedrockClient; + _logger = logger; + } + + /// + /// Sends a Converse request to the Amazon Bedrock Converse API. + /// + /// The Bedrock Model Id. + /// A system prompt instruction. + /// The array of messages in the conversation. + /// The specification for a tool. + /// The response of the model. + public async Task SendConverseRequestAsync(string modelId, string systemPrompt, List conversation, ToolSpecification toolSpec) + { + try + { + var request = new ConverseRequest() + { + ModelId = modelId, + System = new List() + { + new SystemContentBlock() + { + Text = systemPrompt + } + }, + Messages = conversation, + ToolConfig = new ToolConfiguration() + { + Tools = new List() + { + new Tool() + { + ToolSpec = toolSpec + } + } + } + }; + + var response = await _bedrockClient.ConverseAsync(request); + + return response; + } + catch (ModelNotReadyException ex) + { + _logger.LogError(ex, "Model not ready, please wait and try again."); + throw; + } + catch (AmazonBedrockRuntimeException ex) + { + _logger.LogError(ex, "Error occurred while sending Converse request."); + throw; + } + } +} +// snippet-end:[Bedrock.ConverseTool.dotnetv3.SendConverseRequest] \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.cs b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.cs new file mode 100644 index 00000000000..f220fd4c3d6 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.cs @@ -0,0 +1,361 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Bedrock.ConverseTool.dotnetv3.Scenario] + +using Amazon; +using Amazon.BedrockRuntime; +using Amazon.BedrockRuntime.Model; +using Amazon.Runtime.Documents; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Console; + +namespace ConverseToolScenario; + +public static class ConverseToolScenario +{ + /* + Before running this .NET code example, set up your development environment, including your credentials. + + This demo illustrates a tool use scenario using Amazon Bedrock's Converse API and a weather tool. + The script interacts with a foundation model on Amazon Bedrock to provide weather information based on user + input. It uses the Open-Meteo API (https://open-meteo.com) to retrieve current weather data for a given location. + */ + + public static BedrockActionsWrapper _bedrockActionsWrapper = null!; + public static WeatherTool _weatherTool = null!; + public static bool _interactive = true; + + // Change this string to use a different model with Converse API. + private static string model_id = "amazon.nova-lite-v1:0"; + + private static string system_prompt = @" + You are a weather assistant that provides current weather data for user-specified locations using only + the Weather_Tool, which expects latitude and longitude. Infer the coordinates from the location yourself. + If the user provides coordinates, infer the approximate location and refer to it in your response. + To use the tool, you strictly apply the provided tool specification. + + - Explain your step-by-step process, and give brief updates before each step. 
+ - Only use the Weather_Tool for data. Never guess or make up information. + - Repeat the tool use for subsequent requests if necessary. + - If the tool errors, apologize, explain weather is unavailable, and suggest other options. + - Report temperatures in °C (°F) and wind in km/h (mph). Keep weather reports concise. Sparingly use + emojis where appropriate. + - Only respond to weather queries. Remind off-topic users of your purpose. + - Never claim to search online, access external data, or use tools besides Weather_Tool. + - Complete the entire process until you have all required data before sending the complete response. + " + ; + + private static string default_prompt = "What is the weather like in Seattle?"; + + // The maximum number of recursive calls allowed in the tool use function. + // This helps prevent infinite loops and potential performance issues. + private static int max_recursions = 5; + + public static async Task Main(string[] args) + { + // Set up dependency injection for the Amazon service. + using var host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => + logging.AddFilter("System", LogLevel.Error) + .AddFilter("Microsoft", LogLevel.Trace)) + .ConfigureServices((_, services) => + services.AddHttpClient() + .AddSingleton(_ => new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1)) // Specify a region that has access to the chosen model. + .AddTransient() + .AddTransient() + .RemoveAll() + ) + .Build(); + + ServicesSetup(host); + + try + { + await RunConversationAsync(); + + } + catch (Exception ex) + { + Console.WriteLine(new string('-', 80)); + Console.WriteLine($"There was a problem running the scenario: {ex.Message}"); + Console.WriteLine(new string('-', 80)); + } + finally + { + Console.WriteLine( + "Amazon Bedrock Converse API with Tool Use Feature Scenario is complete."); + Console.WriteLine(new string('-', 80)); + } + } + + /// + /// Populate the services for use within the console application. + /// + /// The services host. + private static void ServicesSetup(IHost host) + { + _bedrockActionsWrapper = host.Services.GetRequiredService(); + _weatherTool = host.Services.GetRequiredService(); + } + + /// + /// Starts the conversation with the user and handles the interaction with Bedrock. + /// + /// The conversation array. + public static async Task> RunConversationAsync() + { + // Print the greeting and a short user guide + PrintHeader(); + + // Start with an empty conversation + var conversation = new List(); + + // Get the first user input + var userInput = await GetUserInputAsync(); + + while (userInput != null) + { + // Create a new message with the user input and append it to the conversation + var message = new Message { Role = ConversationRole.User, Content = new List { new ContentBlock { Text = userInput } } }; + conversation.Add(message); + + // Send the conversation to Amazon Bedrock + var bedrockResponse = await SendConversationToBedrock(conversation); + + // Recursively handle the model's response until the model has returned its final response or the recursion counter has reached 0 + await ProcessModelResponseAsync(bedrockResponse, conversation, max_recursions); + + // Repeat the loop until the user decides to exit the application + userInput = await GetUserInputAsync(); + } + + PrintFooter(); + return conversation; + } + + /// + /// Sends the conversation, the system prompt, and the tool spec to Amazon Bedrock, and returns the response. + /// + /// The conversation history including the next message to send. 
+ /// The response from Amazon Bedrock. + private static async Task SendConversationToBedrock(List conversation) + { + Console.WriteLine("\tCalling Bedrock..."); + + // Send the conversation, system prompt, and tool configuration, and return the response + return await _bedrockActionsWrapper.SendConverseRequestAsync(model_id, system_prompt, conversation, _weatherTool.GetToolSpec()); + } + + /// + /// Processes the response received via Amazon Bedrock and performs the necessary actions based on the stop reason. + /// + /// The model's response returned via Amazon Bedrock. + /// The conversation history. + /// The maximum number of recursive calls allowed. + private static async Task ProcessModelResponseAsync(ConverseResponse modelResponse, List conversation, int maxRecursion) + { + if (maxRecursion <= 0) + { + // Stop the process, the number of recursive calls could indicate an infinite loop + Console.WriteLine("\tWarning: Maximum number of recursions reached. Please try again."); + } + + // Append the model's response to the ongoing conversation + conversation.Add(modelResponse.Output.Message); + + if (modelResponse.StopReason == "tool_use") + { + // If the stop reason is "tool_use", forward everything to the tool use handler + await HandleToolUseAsync(modelResponse.Output, conversation, maxRecursion - 1); + } + + if (modelResponse.StopReason == "end_turn") + { + // If the stop reason is "end_turn", print the model's response text, and finish the process + PrintModelResponse(modelResponse.Output.Message.Content[0].Text); + if (!_interactive) + { + default_prompt = "x"; + } + } + } + + /// + /// Handles the tool use case by invoking the specified tool and sending the tool's response back to Bedrock. + /// The tool response is appended to the conversation, and the conversation is sent back to Amazon Bedrock for further processing. + /// + /// The model's response containing the tool use request. + /// The conversation history. + /// The maximum number of recursive calls allowed. 
+ public static async Task HandleToolUseAsync(ConverseOutput modelResponse, List conversation, int maxRecursion) + { + // Initialize an empty list of tool results + var toolResults = new List(); + + // The model's response can consist of multiple content blocks + foreach (var contentBlock in modelResponse.Message.Content) + { + if (!String.IsNullOrEmpty(contentBlock.Text)) + { + // If the content block contains text, print it to the console + PrintModelResponse(contentBlock.Text); + } + + if (contentBlock.ToolUse != null) + { + // If the content block is a tool use request, forward it to the tool + var toolResponse = await InvokeTool(contentBlock.ToolUse); + + // Add the tool use ID and the tool's response to the list of results + toolResults.Add(new ContentBlock + { + ToolResult = new ToolResultBlock() + { + ToolUseId = toolResponse.ToolUseId, + Content = new List() + { new ToolResultContentBlock { Json = toolResponse.Content } } + } + }); + } + } + + // Embed the tool results in a new user message + var message = new Message() { Role = ConversationRole.User, Content = toolResults }; + + // Append the new message to the ongoing conversation + conversation.Add(message); + + // Send the conversation to Amazon Bedrock + var response = await SendConversationToBedrock(conversation); + + // Recursively handle the model's response until the model has returned its final response or the recursion counter has reached 0 + await ProcessModelResponseAsync(response, conversation, maxRecursion); + } + + /// + /// Invokes the specified tool with the given payload and returns the tool's response. + /// If the requested tool does not exist, an error message is returned. + /// + /// The payload containing the tool name and input data. + /// The tool's response or an error message. + public static async Task InvokeTool(ToolUseBlock payload) + { + var toolName = payload.Name; + + if (toolName == "Weather_Tool") + { + var inputData = payload.Input.AsDictionary(); + PrintToolUse(toolName, inputData); + + // Invoke the weather tool with the input data provided + var weatherResponse = await _weatherTool.FetchWeatherDataAsync(inputData["latitude"].ToString(), inputData["longitude"].ToString()); + return new ToolResponse { ToolUseId = payload.ToolUseId, Content = weatherResponse }; + } + else + { + var errorMessage = $"\tThe requested tool with name '{toolName}' does not exist."; + return new ToolResponse { ToolUseId = payload.ToolUseId, Content = new { error = true, message = errorMessage } }; + } + } + + + /// + /// Prompts the user for input and returns the user's response. + /// Returns null if the user enters 'x' to exit. + /// + /// The prompt to display to the user. + /// The user's input or null if the user chooses to exit. + private static async Task GetUserInputAsync(string prompt = "\tYour weather info request:") + { + var userInput = default_prompt; + if (_interactive) + { + Console.WriteLine(new string('*', 80)); + Console.WriteLine($"{prompt} (x to exit): \n\t"); + userInput = Console.ReadLine(); + } + + if (string.IsNullOrWhiteSpace(userInput)) + { + prompt = "\tPlease enter your weather info request, e.g. the name of a city"; + return await GetUserInputAsync(prompt); + } + + if (userInput.ToLowerInvariant() == "x") + { + return null; + } + + return userInput; + } + + /// + /// Logs the welcome message and usage guide for the tool use demo. 
+ /// + public static void PrintHeader() + { + Console.WriteLine(@" + ================================================= + Welcome to the Amazon Bedrock Tool Use demo! + ================================================= + + This assistant provides current weather information for user-specified locations. + You can ask for weather details by providing the location name or coordinates. Weather information + will be provided using a custom Tool and open-meteo API. + + Example queries: + - What's the weather like in New York? + - Current weather for latitude 40.70, longitude -74.01 + - Is it warmer in Rome or Barcelona today? + + To exit the program, simply type 'x' and press Enter. + + P.S.: You're not limited to single locations, or even to using English! + Have fun and experiment with the app! + "); + } + + /// + /// Logs the footer information for the tool use demo. + /// + public static void PrintFooter() + { + Console.WriteLine(@" + ================================================= + Thank you for checking out the Amazon Bedrock Tool Use demo. We hope you + learned something new, or got some inspiration for your own apps today! + + For more Bedrock examples in different programming languages, have a look at: + https://docs.aws.amazon.com/bedrock/latest/userguide/service_code_examples.html + ================================================= + "); + } + + /// + /// Logs information about the tool use. + /// + /// The name of the tool being used. + /// The input data for the tool. + public static void PrintToolUse(string toolName, Dictionary inputData) + { + Console.WriteLine($"\n\tInvoking tool: {toolName} with input: {inputData["latitude"].ToString()}, {inputData["longitude"].ToString()}...\n"); + } + + /// + /// Logs the model's response. + /// + /// The model's response message. + public static void PrintModelResponse(string message) + { + Console.WriteLine("\tThe model's response:\n"); + Console.WriteLine(message); + Console.WriteLine(); + } +} +// snippet-end:[Bedrock.ConverseTool.dotnetv3.Scenario] \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.csproj b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.csproj new file mode 100644 index 00000000000..6d77e9066e9 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ConverseToolScenario.csproj @@ -0,0 +1,19 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + + diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/README.md b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/README.md new file mode 100644 index 00000000000..0c052ac1e45 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/README.md @@ -0,0 +1,59 @@ +# Bedrock Runtime Converse API with Tool Feature Scenario + +## Overview + +This example shows how to use AWS SDKs and the Amazon Bedrock Converse API to call a custom tool from a large language model (LLM) as part of a multistep conversation. The example creates a weather tool that leverages the Open-Meteo API to retrieve current weather information based on user input. + +[Bedrock Converse API with Tool Definition](https://docs.aws.amazon.com/bedrock/latest/userguide/tool-use-inference-call.html). + +## ⚠ Important + +* Running this code might result in charges to your AWS account. +* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. 
At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + +## Scenario + +This example illustrates a typical interaction between a generative AI model, an application, and connected tools or APIs to solve a problem or achieve a specific goal. The scenario follows these steps: + +1. Set up the system prompt and tool configuration. +2. Specify the AI model to be used (e.g., Anthropic Claude 3 Sonnet). +3. Create a client to interact with Amazon Bedrock. +4. Prompt the user for their weather request. +5. Send the user input including the conversation history to the model. +6. The model processes the input and determines if a connected tool or API needs to be used. If this is the case, the model returns a tool use request with specific parameters needed to invoke the tool, and a unique tool use ID to correlate tool responses to the request. +7. The scenario application invokes the tool to fetch weather data, and appends the response and tool use ID to the conversation. +8. The model uses the tool response to generate a final response. If additional tool requests are needed, the process is repeated. +9. Once the final response is received and printed, the application returns to the prompt. + +### Prerequisites + +For general prerequisites, see the [README](../../../README.md) in the `dotnetv3` folder. + +### Resources + +No additional resources are needed for this scenario. + +### Instructions + +After the example compiles, you can run it from the command line. To do so, navigate to +the folder that contains the .sln file and run the following command: + +``` +dotnet run +``` + +Alternatively, you can run the example from within your IDE. + +This starts an interactive scenario that prompts you for weather requests and demonstrates tool use between the model and the weather tool through the Converse API. + +## Additional resources + +- [Documentation: The Amazon Bedrock User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) +- [Tutorials: A developer's guide to Bedrock's new Converse API](https://community.aws/content/2dtauBCeDa703x7fDS9Q30MJoBA/amazon-bedrock-converse-api-developer-guide) +- [More examples: Amazon Bedrock code examples and scenarios in multiple programming languages](https://docs.aws.amazon.com/bedrock/latest/userguide/service_code_examples.html) + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ToolResponse.cs b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ToolResponse.cs new file mode 100644 index 00000000000..95dbb986fa1 --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/ToolResponse.cs @@ -0,0 +1,16 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Bedrock.ConverseTool.dotnetv3.ToolResponse] + +namespace ConverseToolScenario; + +/// +/// Response object for the tool results. 
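Editor's note: the nine README steps above compress into a small request/response loop. The following is a minimal, illustrative sketch of that loop and is not part of this PR; it assumes the `wrapper` (`BedrockActionsWrapper`), `weatherTool`, `modelId`, and `systemPrompt` values that `ConverseToolScenario.cs` sets up above, and `System.Linq` for `First`.

```csharp
// Start the conversation with a single user message (README steps 4-5).
var conversation = new List<Message>
{
    new Message
    {
        Role = ConversationRole.User,
        Content = new List<ContentBlock> { new ContentBlock { Text = "What is the weather like in Seattle?" } }
    }
};

var response = await wrapper.SendConverseRequestAsync(
    modelId, systemPrompt, conversation, weatherTool.GetToolSpec());

// While the model asks for the tool (steps 6-8), invoke it and echo the result
// back under the same tool use ID so the model can correlate request and response.
while (response.StopReason == "tool_use")
{
    conversation.Add(response.Output.Message);

    var toolUse = response.Output.Message.Content.First(c => c.ToolUse != null).ToolUse;
    var toolResponse = await ConverseToolScenario.InvokeTool(toolUse);

    conversation.Add(new Message
    {
        Role = ConversationRole.User,
        Content = new List<ContentBlock>
        {
            new ContentBlock
            {
                ToolResult = new ToolResultBlock
                {
                    ToolUseId = toolResponse.ToolUseId,
                    Content = new List<ToolResultContentBlock> { new ToolResultContentBlock { Json = toolResponse.Content } }
                }
            }
        }
    });

    response = await wrapper.SendConverseRequestAsync(
        modelId, systemPrompt, conversation, weatherTool.GetToolSpec());
}

// "end_turn": the model has produced its final text answer (step 9).
Console.WriteLine(response.Output.Message.Content[0].Text);
```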
+/// +public class ToolResponse +{ + public string ToolUseId { get; set; } = null!; + public dynamic Content { get; set; } = null!; +} +// snippet-end:[Bedrock.ConverseTool.dotnetv3.ToolResponse] \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/WeatherTool.cs b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/WeatherTool.cs new file mode 100644 index 00000000000..1e87b25927b --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/WeatherTool.cs @@ -0,0 +1,98 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Bedrock.ConverseTool.dotnetv3.WeatherTool] + +using Amazon.BedrockRuntime.Model; +using Amazon.Runtime.Documents; +using Microsoft.Extensions.Logging; + +namespace ConverseToolScenario; + +/// +/// Weather tool that will be invoked when requested by the Bedrock response. +/// +public class WeatherTool +{ + private readonly ILogger _logger; + private readonly IHttpClientFactory _httpClientFactory; + + public WeatherTool(ILogger logger, IHttpClientFactory httpClientFactory) + { + _logger = logger; + _httpClientFactory = httpClientFactory; + } + + /// + /// Returns the JSON Schema specification for the Weather tool. The tool specification + /// defines the input schema and describes the tool's functionality. + /// For more information, see https://json-schema.org/understanding-json-schema/reference. + /// + /// The tool specification for the Weather tool. + public ToolSpecification GetToolSpec() + { + ToolSpecification toolSpecification = new ToolSpecification(); + + toolSpecification.Name = "Weather_Tool"; + toolSpecification.Description = "Get the current weather for a given location, based on its WGS84 coordinates."; + + Document toolSpecDocument = Document.FromObject( + new + { + type = "object", + properties = new + { + latitude = new + { + type = "string", + description = "Geographical WGS84 latitude of the location." + }, + longitude = new + { + type = "string", + description = "Geographical WGS84 longitude of the location." + } + }, + required = new[] { "latitude", "longitude" } + }); + + toolSpecification.InputSchema = new ToolInputSchema() { Json = toolSpecDocument }; + return toolSpecification; + } + + /// + /// Fetches weather data for the given latitude and longitude using the Open-Meteo API. + /// Returns the weather data or an error message if the request fails. + /// + /// The latitude of the location. + /// The longitude of the location. + /// The weather data or an error message. 
+ public async Task FetchWeatherDataAsync(string latitude, string longitude) + { + string endpoint = "https://api.open-meteo.com/v1/forecast"; + + try + { + var httpClient = _httpClientFactory.CreateClient(); + var response = await httpClient.GetAsync($"{endpoint}?latitude={latitude}&longitude={longitude}¤t_weather=True"); + response.EnsureSuccessStatusCode(); + var weatherData = await response.Content.ReadAsStringAsync(); + + Document weatherDocument = Document.FromObject( + new { weather_data = weatherData }); + + return weatherDocument; + } + catch (HttpRequestException e) + { + _logger.LogError(e, "Error fetching weather data: {Message}", e.Message); + throw; + } + catch (Exception e) + { + _logger.LogError(e, "Unexpected error fetching weather data: {Message}", e.Message); + throw; + } + } +} +// snippet-end:[Bedrock.ConverseTool.dotnetv3.WeatherTool] \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs index d57db96634e..bb3fafc643d 100644 --- a/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs +++ b/dotnetv3/Bedrock-runtime/Tests/ActionTest_Converse.cs @@ -10,6 +10,7 @@ public class ActionTest_Converse [InlineData(typeof(MetaLlama.Converse))] [InlineData(typeof(CohereCommand.Converse))] [InlineData(typeof(AnthropicClaude.Converse))] + [InlineData(typeof(AmazonNovaText.Converse))] [InlineData(typeof(AmazonTitanText.Converse))] [InlineData(typeof(Ai21LabsJurassic2.Converse))] public void ConverseDoesNotThrow(Type type) diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs index 3c4ab3417f0..0f6ca41ccac 100644 --- a/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs +++ b/dotnetv3/Bedrock-runtime/Tests/ActionTest_ConverseStream.cs @@ -10,6 +10,7 @@ public class ActionTest_ConverseStream [InlineData(typeof(MetaLlama.ConverseStream))] [InlineData(typeof(CohereCommand.ConverseStream))] [InlineData(typeof(AnthropicClaude.ConverseStream))] + [InlineData(typeof(AmazonNovaText.ConverseStream))] [InlineData(typeof(AmazonTitanText.ConverseStream))] public void ConverseStreamDoesNotThrow(Type type) { diff --git a/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs b/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs index 0584cf61793..0b561dc2176 100644 --- a/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs +++ b/dotnetv3/Bedrock-runtime/Tests/ActionTests_InvokeModel.cs @@ -13,6 +13,7 @@ public class ActionTest_InvokeModel [InlineData(typeof(AnthropicClaude.InvokeModel))] [InlineData(typeof(AmazonTitanText.InvokeModel))] [InlineData(typeof(Ai21LabsJurassic2.InvokeModel))] + [InlineData(typeof(AmazonNovaCanvas.InvokeModel))] public void InvokeModelDoesNotThrow(Type type) { var entryPoint = type.Assembly.EntryPoint!; diff --git a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj index b499eb4f7a0..6c0e8620b3e 100644 --- a/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj +++ b/dotnetv3/Bedrock-runtime/Tests/BedrockRuntimeTests.csproj @@ -11,16 +11,16 @@ - - - - - - + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive all - + runtime; build; native; contentfiles; analyzers; buildtransitive all @@ -29,6 +29,10 @@ + + + + @@ -51,6 +55,7 @@ + \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Tests/ConverseToolScenarioTests.cs 
b/dotnetv3/Bedrock-runtime/Tests/ConverseToolScenarioTests.cs new file mode 100644 index 00000000000..f5660a3774b --- /dev/null +++ b/dotnetv3/Bedrock-runtime/Tests/ConverseToolScenarioTests.cs @@ -0,0 +1,65 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +using Amazon; +using Amazon.BedrockRuntime; +using ConverseToolScenario; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace BedrockRuntimeTests; + +/// +/// Tests for the Converse Tool Use example. +/// +public class ConverseToolScenarioTests +{ + private readonly BedrockActionsWrapper _bedrockActionsWrapper = null!; + private readonly WeatherTool _weatherTool = null!; + private readonly ILoggerFactory _loggerFactory; + + /// + /// Constructor for the test class. + /// + public ConverseToolScenarioTests() + { + + _loggerFactory = LoggerFactory.Create(builder => + { + builder.AddConsole(); + }); + + IServiceCollection services = new ServiceCollection(); // [1] + + services.AddHttpClient(); + + IHttpClientFactory _httpClientFactory = services + .BuildServiceProvider() + .GetRequiredService(); + + _bedrockActionsWrapper = new BedrockActionsWrapper( + new AmazonBedrockRuntimeClient(RegionEndpoint.USEast1), new Logger(_loggerFactory)); + _weatherTool = new WeatherTool(new Logger(_loggerFactory), + _httpClientFactory); + ConverseToolScenario.ConverseToolScenario._bedrockActionsWrapper = _bedrockActionsWrapper; + ConverseToolScenario.ConverseToolScenario._weatherTool = _weatherTool; + } + + /// + /// Run the non-interactive scenario. Should return a non-empty conversation. + /// + /// Async task. + [Fact] + [Trait("Category", "Integration")] + public async Task TestScenario() + { + // Arrange. + ConverseToolScenario.ConverseToolScenario._interactive = false; + + // Act. + var conversation = await ConverseToolScenario.ConverseToolScenario.RunConversationAsync(); + + // Assert. + Assert.NotEmpty(conversation); + } +} \ No newline at end of file diff --git a/dotnetv3/Bedrock-runtime/Tests/GlobalUsings.cs b/dotnetv3/Bedrock-runtime/Tests/GlobalUsings.cs index ef5ce323ba9..0f64a5599c7 100644 --- a/dotnetv3/Bedrock-runtime/Tests/GlobalUsings.cs +++ b/dotnetv3/Bedrock-runtime/Tests/GlobalUsings.cs @@ -2,11 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 global using Xunit; -global using Xunit.Extensions.Ordering; // Optional. -[assembly: CollectionBehavior(DisableTestParallelization = true)] -// Optional. -[assembly: TestCaseOrderer("Xunit.Extensions.Ordering.TestCaseOrderer", "Xunit.Extensions.Ordering")] -// Optional. -[assembly: TestCollectionOrderer("Xunit.Extensions.Ordering.CollectionOrderer", "Xunit.Extensions.Ordering")] \ No newline at end of file +[assembly: CollectionBehavior(DisableTestParallelization = true)] \ No newline at end of file diff --git a/dotnetv3/Cognito/README.md b/dotnetv3/Cognito/README.md index eb9c4e7777b..9158cda1f7e 100644 --- a/dotnetv3/Cognito/README.md +++ b/dotnetv3/Cognito/README.md @@ -34,7 +34,7 @@ These examples also require the following resources: To create these resources, run the AWS CloudFormation script in the -[resources/cdk/cognito_scenario_user_pool_with_mfa](../../../resources/cdk/cognito_scenario_user_pool_with_mfa) +[resources/cdk/cognito_scenario_user_pool_with_mfa](../../resources/cdk/cognito_scenario_user_pool_with_mfa) folder. This script outputs a user pool ID and a client ID that you can use to run the scenario. 
diff --git a/dotnetv3/DotNetV3Examples.sln b/dotnetv3/DotNetV3Examples.sln index 54a02263eb2..908a808da58 100644 --- a/dotnetv3/DotNetV3Examples.sln +++ b/dotnetv3/DotNetV3Examples.sln @@ -837,6 +837,24 @@ Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "S3ObjectLockScenario", "S3\ EndProject Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "S3ObjectLockTests", "S3\scenarios\S3ObjectLockScenario\S3ObjectLockTests\S3ObjectLockTests.csproj", "{BCCFBED0-E800-46C5-975B-7D404486F00F}" EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseToolScenario", "Bedrock-runtime\Scenarios\ConverseToolScenario\ConverseToolScenario.csproj", "{83ED7BBE-5C9A-47AC-805B-351270069570}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "DynamoDB_Actions", "DynamoDB_Actions", "{72466F30-810F-4963-B748-5154A6C49926}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "DynamoDB_Actions", "dynamodb\scenarios\DynamoDB_Basics\DynamoDB_Actions\DynamoDB_Actions.csproj", "{B8843CE1-23AF-4E54-A916-C3FD94B4FF9A}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNova", "AmazonNova", "{9FB5136B-F426-454C-B32D-855E07DBC0FE}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNovaText", "AmazonNovaText", "{6EA5F10D-C016-4AB0-B551-099DBFD74F95}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "ConverseStream", "Bedrock-runtime\Models\AmazonNova\AmazonNovaText\ConverseStream\ConverseStream.csproj", "{C0AC14E2-54E9-426E-8A4A-7B64946A4715}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Converse", "Bedrock-runtime\Models\AmazonNova\AmazonNovaText\Converse\Converse.csproj", "{FD901D0E-B970-42A3-B6E2-219BDA882F19}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "AmazonNovaCanvas", "AmazonNovaCanvas", "{CDA2FA21-36E1-4847-A5A8-AF921C4BBBD7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "InvokeModel", "Bedrock-runtime\Models\AmazonNova\AmazonNovaCanvas\InvokeModel\InvokeModel.csproj", "{1D2CF12A-F46E-4293-ABB3-2FD70D84328F}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -1915,6 +1933,26 @@ Global {BCCFBED0-E800-46C5-975B-7D404486F00F}.Debug|Any CPU.Build.0 = Debug|Any CPU {BCCFBED0-E800-46C5-975B-7D404486F00F}.Release|Any CPU.ActiveCfg = Release|Any CPU {BCCFBED0-E800-46C5-975B-7D404486F00F}.Release|Any CPU.Build.0 = Release|Any CPU + {83ED7BBE-5C9A-47AC-805B-351270069570}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {83ED7BBE-5C9A-47AC-805B-351270069570}.Debug|Any CPU.Build.0 = Debug|Any CPU + {83ED7BBE-5C9A-47AC-805B-351270069570}.Release|Any CPU.ActiveCfg = Release|Any CPU + {83ED7BBE-5C9A-47AC-805B-351270069570}.Release|Any CPU.Build.0 = Release|Any CPU + {B8843CE1-23AF-4E54-A916-C3FD94B4FF9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B8843CE1-23AF-4E54-A916-C3FD94B4FF9A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B8843CE1-23AF-4E54-A916-C3FD94B4FF9A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B8843CE1-23AF-4E54-A916-C3FD94B4FF9A}.Release|Any CPU.Build.0 = Release|Any CPU + {C0AC14E2-54E9-426E-8A4A-7B64946A4715}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0AC14E2-54E9-426E-8A4A-7B64946A4715}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0AC14E2-54E9-426E-8A4A-7B64946A4715}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0AC14E2-54E9-426E-8A4A-7B64946A4715}.Release|Any CPU.Build.0 = Release|Any CPU + {FD901D0E-B970-42A3-B6E2-219BDA882F19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{FD901D0E-B970-42A3-B6E2-219BDA882F19}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FD901D0E-B970-42A3-B6E2-219BDA882F19}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FD901D0E-B970-42A3-B6E2-219BDA882F19}.Release|Any CPU.Build.0 = Release|Any CPU + {1D2CF12A-F46E-4293-ABB3-2FD70D84328F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1D2CF12A-F46E-4293-ABB3-2FD70D84328F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1D2CF12A-F46E-4293-ABB3-2FD70D84328F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1D2CF12A-F46E-4293-ABB3-2FD70D84328F}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -2296,6 +2334,15 @@ Global {7EC94891-9A5F-47EF-9C97-8A280754525C} = {0169CEB9-B6A7-447D-921D-C79358DDCCE6} {93588ED1-A248-4F6C-85A4-27E9E65D8AC7} = {7EC94891-9A5F-47EF-9C97-8A280754525C} {BCCFBED0-E800-46C5-975B-7D404486F00F} = {7EC94891-9A5F-47EF-9C97-8A280754525C} + {83ED7BBE-5C9A-47AC-805B-351270069570} = {BA23BB28-EC63-4330-8CA7-DEB1B6489580} + {72466F30-810F-4963-B748-5154A6C49926} = {3F9C4507-5BD7-4AA5-9EE0-538DE08FAF43} + {B8843CE1-23AF-4E54-A916-C3FD94B4FF9A} = {72466F30-810F-4963-B748-5154A6C49926} + {9FB5136B-F426-454C-B32D-855E07DBC0FE} = {6520EB28-F7B4-4581-B3D8-A06E9303B16B} + {6EA5F10D-C016-4AB0-B551-099DBFD74F95} = {9FB5136B-F426-454C-B32D-855E07DBC0FE} + {C0AC14E2-54E9-426E-8A4A-7B64946A4715} = {6EA5F10D-C016-4AB0-B551-099DBFD74F95} + {FD901D0E-B970-42A3-B6E2-219BDA882F19} = {6EA5F10D-C016-4AB0-B551-099DBFD74F95} + {CDA2FA21-36E1-4847-A5A8-AF921C4BBBD7} = {9FB5136B-F426-454C-B32D-855E07DBC0FE} + {1D2CF12A-F46E-4293-ABB3-2FD70D84328F} = {CDA2FA21-36E1-4847-A5A8-AF921C4BBBD7} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {08502818-E8E1-4A91-A51C-4C8C8D4FF9CA} diff --git a/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadData.cs b/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadData.cs index 4c822237176..c1db8de3c26 100644 --- a/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadData.cs +++ b/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadData.cs @@ -71,26 +71,22 @@ public static async Task CreateTableProductCatalog(IAmazo { TableName = tableName, AttributeDefinitions = new List() - { - new AttributeDefinition - { - AttributeName = "Id", - AttributeType = ScalarAttributeType.N, - }, - }, + { + new AttributeDefinition + { + AttributeName = "Id", + AttributeType = ScalarAttributeType.N, + }, + }, KeySchema = new List() - { - new KeySchemaElement - { - AttributeName = "Id", - KeyType = KeyType.HASH, - }, - }, - ProvisionedThroughput = new ProvisionedThroughput + { + new KeySchemaElement { - ReadCapacityUnits = 10, - WriteCapacityUnits = 5, + AttributeName = "Id", + KeyType = KeyType.HASH, }, + }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); var result = await WaitTillTableCreated(client, tableName, response); @@ -112,26 +108,22 @@ public static async Task CreateTableForum(IAmazonDynamoDB { TableName = tableName, AttributeDefinitions = new List() - { - new AttributeDefinition - { - AttributeName = "Name", - AttributeType = ScalarAttributeType.S, - }, - }, + { + new AttributeDefinition + { + AttributeName = "Name", + AttributeType = ScalarAttributeType.S, + }, + }, KeySchema = new List() - { - new KeySchemaElement - { - AttributeName = "Name", - KeyType = KeyType.HASH, - }, - }, - ProvisionedThroughput = new 
ProvisionedThroughput + { + new KeySchemaElement { - ReadCapacityUnits = 10, - WriteCapacityUnits = 5, + AttributeName = "Name", + KeyType = KeyType.HASH, }, + }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); var result = await WaitTillTableCreated(client, tableName, response); @@ -154,36 +146,32 @@ public static async Task CreateTableThread(IAmazonDynamoD { TableName = tableName, AttributeDefinitions = new List() - { - new AttributeDefinition - { - AttributeName = "ForumName", // Hash attribute. - AttributeType = ScalarAttributeType.S, - }, - new AttributeDefinition - { - AttributeName = "Subject", - AttributeType = ScalarAttributeType.S, - }, - }, + { + new AttributeDefinition + { + AttributeName = "ForumName", // Hash attribute. + AttributeType = ScalarAttributeType.S, + }, + new AttributeDefinition + { + AttributeName = "Subject", + AttributeType = ScalarAttributeType.S, + }, + }, KeySchema = new List() - { - new KeySchemaElement - { - AttributeName = "ForumName", // Hash attribute - KeyType = KeyType.HASH, - }, - new KeySchemaElement - { - AttributeName = "Subject", // Range attribute - KeyType = KeyType.RANGE, - }, - }, - ProvisionedThroughput = new ProvisionedThroughput + { + new KeySchemaElement + { + AttributeName = "ForumName", // Hash attribute + KeyType = KeyType.HASH, + }, + new KeySchemaElement { - ReadCapacityUnits = 10, - WriteCapacityUnits = 5, + AttributeName = "Subject", // Range attribute + KeyType = KeyType.RANGE, }, + }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); var result = await WaitTillTableCreated(client, tableName, response); @@ -256,11 +244,7 @@ public static async Task CreateTableReply(IAmazonDynamoDB }, }, }, - ProvisionedThroughput = new ProvisionedThroughput - { - ReadCapacityUnits = 10, - WriteCapacityUnits = 5, - }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); var result = await WaitTillTableCreated(client, tableName, response); diff --git a/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadDataExample.csproj b/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadDataExample.csproj index a6f7af21bee..06fcb8c73d3 100644 --- a/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadDataExample.csproj +++ b/dotnetv3/dynamodb/CreateTablesLoadDataExample/CreateTablesLoadDataExample/CreateTablesLoadDataExample.csproj @@ -6,8 +6,8 @@ - - + + runtime; build; native; contentfiles; analyzers; buildtransitive all diff --git a/dotnetv3/dynamodb/README.md b/dotnetv3/dynamodb/README.md index 407ad563340..0f18a83e129 100644 --- a/dotnetv3/dynamodb/README.md +++ b/dotnetv3/dynamodb/README.md @@ -47,18 +47,18 @@ Code excerpts that show you how to call individual service functions. 
- [BatchExecuteStatement](scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatchMethods.cs#L10) - [BatchGetItem](low-level-api/LowLevelBatchGet/LowLevelBatchGet.cs#L4) -- [BatchWriteItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L202) +- [BatchWriteItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L198) - [CreateTable](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L14) -- [DeleteItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L262) -- [DeleteTable](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L391) -- [DescribeTable](low-level-api/LowLevelTableExample/LowLevelTableExample.cs#L126) +- [DeleteItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L258) +- [DeleteTable](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L387) +- [DescribeTable](low-level-api/LowLevelTableExample/LowLevelTableExample.cs#L120) - [ExecuteStatement](scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/PartiQLMethods.cs#L163) -- [GetItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L171) -- [ListTables](low-level-api/LowLevelTableExample/LowLevelTableExample.cs#L102) -- [PutItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L89) -- [Query](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L297) -- [Scan](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L350) -- [UpdateItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L119) +- [GetItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L167) +- [ListTables](low-level-api/LowLevelTableExample/LowLevelTableExample.cs#L96) +- [PutItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L85) +- [Query](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L293) +- [Scan](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L346) +- [UpdateItem](scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs#L115) ### Scenarios @@ -240,4 +240,4 @@ in the `dotnetv3` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/dotnetv3/dynamodb/low-level-api/LowLevelTableExample/LowLevelTableExample.cs b/dotnetv3/dynamodb/low-level-api/LowLevelTableExample/LowLevelTableExample.cs index 39254ea9468..3a7de3b81b7 100644 --- a/dotnetv3/dynamodb/low-level-api/LowLevelTableExample/LowLevelTableExample.cs +++ b/dotnetv3/dynamodb/low-level-api/LowLevelTableExample/LowLevelTableExample.cs @@ -80,18 +80,12 @@ private static async Task CreateExampleTable() KeyType = KeyType.RANGE //Sort key } }, - ProvisionedThroughput = new ProvisionedThroughput - { - ReadCapacityUnits = 5, - WriteCapacityUnits = 6 - }, - TableName = ExampleTableName + TableName = ExampleTableName, + BillingMode = BillingMode.PAY_PER_REQUEST, }); var tableDescription = response.TableDescription; - Console.WriteLine($"{tableDescription.TableName}: {tableDescription.TableStatus} \t " + - $"ReadsPerSec: {tableDescription.ProvisionedThroughput.ReadCapacityUnits} \t " + - $"WritesPerSec: {tableDescription.ProvisionedThroughput.WriteCapacityUnits}"); + Console.WriteLine($"{tableDescription.TableName}: {tableDescription.TableStatus}"); Console.WriteLine($"{ExampleTableName} - {tableDescription.TableStatus}"); @@ -136,21 +130,19 @@ private static async Task GetTableInformation() var table = response.Table; Console.WriteLine($"Name: {table.TableName}"); Console.WriteLine($"# of items: {table.ItemCount}"); - Console.WriteLine($"Provision Throughput (reads/sec): " + - $"{table.ProvisionedThroughput.ReadCapacityUnits}"); - Console.WriteLine($"Provision Throughput (writes/sec): " + - $"{table.ProvisionedThroughput.WriteCapacityUnits}"); + } // snippet-end:[dynamodb.dotnetv3.DescribeTableExample] // snippet-start:[dynamodb.dotnetv3.UpdateExampleTable] private static async Task UpdateExampleTable() { - Console.WriteLine("\n*** Updating table ***"); + Console.WriteLine("\n*** Updating table billing mode ***"); await Client.UpdateTableAsync(new UpdateTableRequest { TableName = ExampleTableName, + BillingMode = BillingMode.PROVISIONED, ProvisionedThroughput = new ProvisionedThroughput { ReadCapacityUnits = 6, diff --git a/dotnetv3/dynamodb/scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs b/dotnetv3/dynamodb/scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs index 99334150307..a2ec50ad485 100644 --- a/dotnetv3/dynamodb/scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs +++ b/dotnetv3/dynamodb/scenarios/DynamoDB_Basics/DynamoDB_Actions/DynamoDbMethods.cs @@ -51,11 +51,7 @@ public static async Task CreateMovieTableAsync(AmazonDynamoDBClient client KeyType = KeyType.RANGE, }, }, - ProvisionedThroughput = new ProvisionedThroughput - { - ReadCapacityUnits = 5, - WriteCapacityUnits = 5, - }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); // Wait until the table is ACTIVE and then report success. 
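Editor's note: the provisioned-to-on-demand change that repeats across these DynamoDB table-creation examples reduces to dropping the `ProvisionedThroughput` block and setting `BillingMode`. A minimal sketch follows, not part of this PR; the table and attribute names are illustrative.

```csharp
using Amazon.DynamoDBv2;
using Amazon.DynamoDBv2.Model;

var client = new AmazonDynamoDBClient();

// On-demand (pay-per-request) capacity: no ProvisionedThroughput block is needed.
var response = await client.CreateTableAsync(new CreateTableRequest
{
    TableName = "movie_table", // Illustrative name.
    AttributeDefinitions = new List<AttributeDefinition>
    {
        new AttributeDefinition { AttributeName = "year", AttributeType = ScalarAttributeType.N },
        new AttributeDefinition { AttributeName = "title", AttributeType = ScalarAttributeType.S },
    },
    KeySchema = new List<KeySchemaElement>
    {
        new KeySchemaElement { AttributeName = "year", KeyType = KeyType.HASH },   // Partition key.
        new KeySchemaElement { AttributeName = "title", KeyType = KeyType.RANGE }, // Sort key.
    },
    BillingMode = BillingMode.PAY_PER_REQUEST,
});

Console.WriteLine($"Table status: {response.TableDescription.TableStatus}");
```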
diff --git a/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/DynamoDBMethods.cs b/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/DynamoDBMethods.cs index aeb7a4562e2..fbfb7a52799 100644 --- a/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/DynamoDBMethods.cs +++ b/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/DynamoDBMethods.cs @@ -49,11 +49,7 @@ public static async Task CreateMovieTableAsync(string tableName) KeyType = KeyType.RANGE, }, }, - ProvisionedThroughput = new ProvisionedThroughput - { - ReadCapacityUnits = 5, - WriteCapacityUnits = 5, - }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); // Wait until the table is ACTIVE and then report success. diff --git a/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/PartiQLBasics.cs b/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/PartiQLBasics.cs index c06c822ca2e..4568b22ab86 100644 --- a/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/PartiQLBasics.cs +++ b/dotnetv3/dynamodb/scenarios/PartiQL_Basics_Scenario/PartiQL_Basics_Scenario/PartiQLBasics.cs @@ -3,14 +3,10 @@ // snippet-start:[PartiQL.dotnetv3.PartiQLBasicsScenario] -// Before you run this example, download 'movies.json' from -// https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/GettingStarted.Js.02.html, -// and put it in the same folder as the example. - // Separator for the console display. var SepBar = new string('-', 80); const string tableName = "movie_table"; -const string movieFileName = "moviedata.json"; +const string movieFileName = @"..\..\..\..\..\..\..\..\resources\sample_files\movies.json"; var client = new AmazonDynamoDBClient(); diff --git a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/DynamoDBMethods.cs b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/DynamoDBMethods.cs index f674bc0bc7a..86878a0ea36 100644 --- a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/DynamoDBMethods.cs +++ b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/DynamoDBMethods.cs @@ -49,11 +49,7 @@ public static async Task CreateMovieTableAsync(string tableName) KeyType = KeyType.RANGE, }, }, - ProvisionedThroughput = new ProvisionedThroughput - { - ReadCapacityUnits = 5, - WriteCapacityUnits = 5, - }, + BillingMode = BillingMode.PAY_PER_REQUEST, }); // Wait until the table is ACTIVE and then report success. diff --git a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatch.cs b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatch.cs index 46c7325d57b..1cd917a8c5f 100644 --- a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatch.cs +++ b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatch.cs @@ -10,7 +10,7 @@ // Separator for the console display. 
var SepBar = new string('-', 80); const string tableName = "movie_table"; -const string movieFileName = "moviedata.json"; +const string movieFileName = @"..\..\..\..\..\..\..\..\resources\sample_files\movies.json"; DisplayInstructions(); diff --git a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatchMethods.cs b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatchMethods.cs index 6468cb86b83..c13d20c33ce 100644 --- a/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatchMethods.cs +++ b/dotnetv3/dynamodb/scenarios/PartiQL_Batch_Scenario/PartiQL_Batch_Scenario/PartiQLBatchMethods.cs @@ -120,7 +120,7 @@ public static async Task GetBatch( int year1, int year2) { - var getBatch = $"SELECT FROM {tableName} WHERE title = ? AND year = ?"; + var getBatch = $"SELECT * FROM {tableName} WHERE title = ? AND year = ?"; var statements = new List { new BatchStatementRequest @@ -153,7 +153,10 @@ public static async Task GetBatch( { response.Responses.ForEach(r => { - Console.WriteLine($"{r.Item["title"]}\t{r.Item["year"]}"); + if (r.Item.Any()) + { + Console.WriteLine($"{r.Item["title"]}\t{r.Item["year"]}"); + } }); return true; } diff --git a/dotnetv4/Aurora/Actions/AuroraWrapper.cs b/dotnetv4/Aurora/Actions/AuroraWrapper.cs index 18c7646cc9e..9f469df9fb2 100644 --- a/dotnetv4/Aurora/Actions/AuroraWrapper.cs +++ b/dotnetv4/Aurora/Actions/AuroraWrapper.cs @@ -124,7 +124,7 @@ public async Task ModifyIntegerParametersInGroupAsync(string groupName, { foreach (var p in parameters) { - if (p.IsModifiable.Value && p.DataType == "integer") + if (p.IsModifiable.GetValueOrDefault() && p.DataType == "integer") { while (newValue == 0) { diff --git a/dotnetv4/Bedrock/Actions/BedrockActions.csproj b/dotnetv4/Bedrock/Actions/BedrockActions.csproj index 9f12aa3e3f2..ffee5ec19c7 100644 --- a/dotnetv4/Bedrock/Actions/BedrockActions.csproj +++ b/dotnetv4/Bedrock/Actions/BedrockActions.csproj @@ -9,6 +9,7 @@ + diff --git a/dotnetv4/Cognito/Actions/CognitoActions.csproj b/dotnetv4/Cognito/Actions/CognitoActions.csproj new file mode 100644 index 00000000000..653035419c0 --- /dev/null +++ b/dotnetv4/Cognito/Actions/CognitoActions.csproj @@ -0,0 +1,17 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + diff --git a/dotnetv4/Cognito/Actions/CognitoWrapper.cs b/dotnetv4/Cognito/Actions/CognitoWrapper.cs new file mode 100644 index 00000000000..188a6bb1cd2 --- /dev/null +++ b/dotnetv4/Cognito/Actions/CognitoWrapper.cs @@ -0,0 +1,347 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv4.CognitoWrapper] +using System.Net; + +namespace CognitoActions; + +/// +/// Methods to perform Amazon Cognito Identity Provider actions. +/// +public class CognitoWrapper +{ + private readonly IAmazonCognitoIdentityProvider _cognitoService; + + /// + /// Constructor for the wrapper class containing Amazon Cognito actions. + /// + /// The Amazon Cognito client object. + public CognitoWrapper(IAmazonCognitoIdentityProvider cognitoService) + { + _cognitoService = cognitoService; + } + + // snippet-start:[Cognito.dotnetv4.ListUserPools] + /// + /// List the Amazon Cognito user pools for an account. + /// + /// A list of UserPoolDescriptionType objects. 
+ public async Task> ListUserPoolsAsync() + { + var userPools = new List(); + + var userPoolsPaginator = _cognitoService.Paginators.ListUserPools(new ListUserPoolsRequest()); + + await foreach (var response in userPoolsPaginator.Responses) + { + userPools.AddRange(response.UserPools); + } + + return userPools; + } + + // snippet-end:[Cognito.dotnetv4.ListUserPools] + + // snippet-start:[Cognito.dotnetv4.ListUsers] + /// + /// Get a list of users for the Amazon Cognito user pool. + /// + /// The user pool ID. + /// A list of users. + public async Task> ListUsersAsync(string userPoolId) + { + var request = new ListUsersRequest + { + UserPoolId = userPoolId + }; + + var users = new List(); + + var usersPaginator = _cognitoService.Paginators.ListUsers(request); + await foreach (var response in usersPaginator.Responses) + { + users.AddRange(response.Users); + } + + return users; + } + + // snippet-end:[Cognito.dotnetv4.ListUsers] + + // snippet-start:[Cognito.dotnetv4.AdminRespondToAuthChallenge] + /// + /// Respond to an admin authentication challenge. + /// + /// The name of the user. + /// The client ID. + /// The multi-factor authentication code. + /// The current application session. + /// The user pool ID. + /// The result of the authentication response. + public async Task AdminRespondToAuthChallengeAsync( + string userName, + string clientId, + string mfaCode, + string session, + string userPoolId) + { + Console.WriteLine("SOFTWARE_TOKEN_MFA challenge is generated"); + + var challengeResponses = new Dictionary(); + challengeResponses.Add("USERNAME", userName); + challengeResponses.Add("SOFTWARE_TOKEN_MFA_CODE", mfaCode); + + var respondToAuthChallengeRequest = new AdminRespondToAuthChallengeRequest + { + ChallengeName = ChallengeNameType.SOFTWARE_TOKEN_MFA, + ClientId = clientId, + ChallengeResponses = challengeResponses, + Session = session, + UserPoolId = userPoolId, + }; + + var response = await _cognitoService.AdminRespondToAuthChallengeAsync(respondToAuthChallengeRequest); + Console.WriteLine($"Response to Authentication {response.AuthenticationResult.TokenType}"); + return response.AuthenticationResult; + } + + // snippet-end:[Cognito.dotnetv4.AdminRespondToAuthChallenge] + + // snippet-start:[Cognito.dotnetv4.VerifySoftwareToken] + /// + /// Verify the TOTP and register for MFA. + /// + /// The name of the session. + /// The MFA code. + /// The status of the software token. + public async Task VerifySoftwareTokenAsync(string session, string code) + { + var tokenRequest = new VerifySoftwareTokenRequest + { + UserCode = code, + Session = session, + }; + + var verifyResponse = await _cognitoService.VerifySoftwareTokenAsync(tokenRequest); + + return verifyResponse.Status; + } + + // snippet-end:[Cognito.dotnetv4.VerifySoftwareToken] + + // snippet-start:[Cognito.dotnetv4.AssociateSoftwareToken] + /// + /// Get an MFA token to authenticate the user with the authenticator. + /// + /// The session name. + /// The session name. 
+ public async Task AssociateSoftwareTokenAsync(string session) + { + var softwareTokenRequest = new AssociateSoftwareTokenRequest + { + Session = session, + }; + + var tokenResponse = await _cognitoService.AssociateSoftwareTokenAsync(softwareTokenRequest); + var secretCode = tokenResponse.SecretCode; + + Console.WriteLine($"Use the following secret code to set up the authenticator: {secretCode}"); + + return tokenResponse.Session; + } + + // snippet-end:[Cognito.dotnetv4.AssociateSoftwareToken] + + // snippet-start:[Cognito.dotnetv4.AdminInitiateAuth] + /// + /// Initiate an admin auth request. + /// + /// The client ID to use. + /// The ID of the user pool. + /// The username to authenticate. + /// The user's password. + /// The session to use in challenge-response. + public async Task AdminInitiateAuthAsync(string clientId, string userPoolId, string userName, string password) + { + var authParameters = new Dictionary(); + authParameters.Add("USERNAME", userName); + authParameters.Add("PASSWORD", password); + + var request = new AdminInitiateAuthRequest + { + ClientId = clientId, + UserPoolId = userPoolId, + AuthParameters = authParameters, + AuthFlow = AuthFlowType.ADMIN_USER_PASSWORD_AUTH, + }; + + var response = await _cognitoService.AdminInitiateAuthAsync(request); + return response.Session; + } + // snippet-end:[Cognito.dotnetv4.AdminInitiateAuth] + + // snippet-start:[Cognito.dotnetv4.InitiateAuth] + /// + /// Initiate authorization. + /// + /// The client Id of the application. + /// The name of the user who is authenticating. + /// The password for the user who is authenticating. + /// The response from the initiate auth request. + public async Task InitiateAuthAsync(string clientId, string userName, string password) + { + var authParameters = new Dictionary(); + authParameters.Add("USERNAME", userName); + authParameters.Add("PASSWORD", password); + + var authRequest = new InitiateAuthRequest + + { + ClientId = clientId, + AuthParameters = authParameters, + AuthFlow = AuthFlowType.USER_PASSWORD_AUTH, + }; + + var response = await _cognitoService.InitiateAuthAsync(authRequest); + Console.WriteLine($"Result Challenge is : {response.ChallengeName}"); + + return response; + } + // snippet-end:[Cognito.dotnetv4.InitiateAuth] + + // snippet-start:[Cognito.dotnetv4.ConfirmSignUp] + /// + /// Confirm that the user has signed up. + /// + /// The Id of this application. + /// The confirmation code sent to the user. + /// The username. + /// True if successful. + public async Task ConfirmSignupAsync(string clientId, string code, string userName) + { + var signUpRequest = new ConfirmSignUpRequest + { + ClientId = clientId, + ConfirmationCode = code, + Username = userName, + }; + + var response = await _cognitoService.ConfirmSignUpAsync(signUpRequest); + if (response.HttpStatusCode == HttpStatusCode.OK) + { + Console.WriteLine($"{userName} was confirmed"); + return true; + } + return false; + } + + // snippet-end:[Cognito.dotnetv4.ConfirmSignUp] + + // snippet-start:[Cognito.dotnetv4.ConfirmDevice] + /// + /// Initiates and confirms tracking of the device. + /// + /// The user's access token. + /// The key of the device from Amazon Cognito. + /// The device name. 
+ /// + public async Task ConfirmDeviceAsync(string accessToken, string deviceKey, string deviceName) + { + var request = new ConfirmDeviceRequest + { + AccessToken = accessToken, + DeviceKey = deviceKey, + DeviceName = deviceName + }; + + var response = await _cognitoService.ConfirmDeviceAsync(request); + return response.UserConfirmationNecessary; + } + + // snippet-end:[Cognito.dotnetv4.ConfirmDevice] + + // snippet-start:[Cognito.dotnetv4.ResendConfirmationCode] + /// + /// Send a new confirmation code to a user. + /// + /// The Id of the client application. + /// The username of user who will receive the code. + /// The delivery details. + public async Task ResendConfirmationCodeAsync(string clientId, string userName) + { + var codeRequest = new ResendConfirmationCodeRequest + { + ClientId = clientId, + Username = userName, + }; + + var response = await _cognitoService.ResendConfirmationCodeAsync(codeRequest); + + Console.WriteLine($"Method of delivery is {response.CodeDeliveryDetails.DeliveryMedium}"); + + return response.CodeDeliveryDetails; + } + + // snippet-end:[Cognito.dotnetv4.ResendConfirmationCode] + + // snippet-start:[Cognito.dotnetv4.GetAdminUser] + /// + /// Get the specified user from an Amazon Cognito user pool with administrator access. + /// + /// The name of the user. + /// The Id of the Amazon Cognito user pool. + /// Async task. + public async Task GetAdminUserAsync(string userName, string poolId) + { + AdminGetUserRequest userRequest = new AdminGetUserRequest + { + Username = userName, + UserPoolId = poolId, + }; + + var response = await _cognitoService.AdminGetUserAsync(userRequest); + + Console.WriteLine($"User status {response.UserStatus}"); + return response.UserStatus; + } + + // snippet-end:[Cognito.dotnetv4.GetAdminUser] + + // snippet-start:[Cognito.dotnetv4.SignUp] + /// + /// Sign up a new user. + /// + /// The client Id of the application. + /// The username to use. + /// The user's password. + /// The email address of the user. + /// A Boolean value indicating whether the user was confirmed. + public async Task SignUpAsync(string clientId, string userName, string password, string email) + { + var userAttrs = new AttributeType + { + Name = "email", + Value = email, + }; + + var userAttrsList = new List(); + + userAttrsList.Add(userAttrs); + + var signUpRequest = new SignUpRequest + { + UserAttributes = userAttrsList, + Username = userName, + ClientId = clientId, + Password = password + }; + + var response = await _cognitoService.SignUpAsync(signUpRequest); + return response.HttpStatusCode == HttpStatusCode.OK; + } + + // snippet-end:[Cognito.dotnetv4.SignUp] +} + +// snippet-end:[Cognito.dotnetv4.CognitoWrapper] \ No newline at end of file diff --git a/dotnetv4/Cognito/Actions/HelloCognito.cs b/dotnetv4/Cognito/Actions/HelloCognito.cs new file mode 100644 index 00000000000..230a4d86799 --- /dev/null +++ b/dotnetv4/Cognito/Actions/HelloCognito.cs @@ -0,0 +1,64 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv4.HelloCognito] + +using LogLevel = Microsoft.Extensions.Logging.LogLevel; + +namespace CognitoActions; + +/// +/// A class that introduces the Amazon Cognito Identity Provider by listing the +/// user pools for the account. +/// +public class HelloCognito +{ + private static ILogger logger = null!; + + static async Task Main(string[] args) + { + // Set up dependency injection for Amazon Cognito. 
+ using var host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => + logging.AddFilter("System", LogLevel.Debug) + .AddFilter("Microsoft", LogLevel.Information) + .AddFilter("Microsoft", LogLevel.Trace)) + .ConfigureServices((_, services) => + services.AddAWSService() + .AddTransient() + ) + .Build(); + + logger = LoggerFactory.Create(builder => { builder.AddConsole(); }) + .CreateLogger(); + + var amazonClient = host.Services.GetRequiredService(); + + Console.Clear(); + Console.WriteLine("Hello Amazon Cognito."); + Console.WriteLine("Let's get a list of your Amazon Cognito user pools."); + + var userPools = new List(); + + var userPoolsPaginator = amazonClient.Paginators.ListUserPools(new ListUserPoolsRequest()); + + await foreach (var response in userPoolsPaginator.Responses) + { + userPools.AddRange(response.UserPools); + } + + if (userPools.Count > 0) + { + userPools.ForEach(userPool => + { + Console.WriteLine($"{userPool.Name}\t{userPool.Id}"); + }); + } + else + { + Console.WriteLine("No user pools were found."); + } + } +} + +// snippet-end:[Cognito.dotnetv4.HelloCognito] \ No newline at end of file diff --git a/dotnetv4/Cognito/Actions/Usings.cs b/dotnetv4/Cognito/Actions/Usings.cs new file mode 100644 index 00000000000..5b7cea27136 --- /dev/null +++ b/dotnetv4/Cognito/Actions/Usings.cs @@ -0,0 +1,13 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv3.Usings] +global using Amazon.CognitoIdentityProvider; +global using Amazon.CognitoIdentityProvider.Model; +global using Microsoft.Extensions.DependencyInjection; +global using Microsoft.Extensions.Hosting; +global using Microsoft.Extensions.Logging; +global using Microsoft.Extensions.Logging.Console; +global using Microsoft.Extensions.Logging.Debug; + +// snippet-end:[Cognito.dotnetv3.Usings] \ No newline at end of file diff --git a/dotnetv4/Cognito/CognitoExamples.sln b/dotnetv4/Cognito/CognitoExamples.sln new file mode 100644 index 00000000000..694f56abe02 --- /dev/null +++ b/dotnetv4/Cognito/CognitoExamples.sln @@ -0,0 +1,48 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.2.32630.192 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Actions", "Actions", "{7907FB6A-1353-4735-95DC-EEC5DF8C0649}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scenarios", "Scenarios", "{B987097B-189C-4D0B-99BC-E67CD705BCA0}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Tests", "Tests", "{5455D423-2AFC-4BC6-B79D-9DC4270D8F7D}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CognitoActions", "Actions\CognitoActions.csproj", "{796910FA-6E94-460B-8CB4-97DF01B9ADC8}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CognitoBasics", "Scenarios\Cognito_Basics\CognitoBasics.csproj", "{B1731AE1-381F-4044-BEBE-269FF7E24B1F}" +EndProject +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "CognitoTests", "Tests\CognitoTests.csproj", "{6046A2FC-6A39-4C2D-8DD9-AA3740B17B88}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Release|Any CPU = Release|Any CPU + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {796910FA-6E94-460B-8CB4-97DF01B9ADC8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {796910FA-6E94-460B-8CB4-97DF01B9ADC8}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{796910FA-6E94-460B-8CB4-97DF01B9ADC8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {796910FA-6E94-460B-8CB4-97DF01B9ADC8}.Release|Any CPU.Build.0 = Release|Any CPU + {B1731AE1-381F-4044-BEBE-269FF7E24B1F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B1731AE1-381F-4044-BEBE-269FF7E24B1F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B1731AE1-381F-4044-BEBE-269FF7E24B1F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B1731AE1-381F-4044-BEBE-269FF7E24B1F}.Release|Any CPU.Build.0 = Release|Any CPU + {6046A2FC-6A39-4C2D-8DD9-AA3740B17B88}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6046A2FC-6A39-4C2D-8DD9-AA3740B17B88}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6046A2FC-6A39-4C2D-8DD9-AA3740B17B88}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6046A2FC-6A39-4C2D-8DD9-AA3740B17B88}.Release|Any CPU.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {796910FA-6E94-460B-8CB4-97DF01B9ADC8} = {7907FB6A-1353-4735-95DC-EEC5DF8C0649} + {B1731AE1-381F-4044-BEBE-269FF7E24B1F} = {B987097B-189C-4D0B-99BC-E67CD705BCA0} + {6046A2FC-6A39-4C2D-8DD9-AA3740B17B88} = {5455D423-2AFC-4BC6-B79D-9DC4270D8F7D} + EndGlobalSection + GlobalSection(ExtensibilityGlobals) = postSolution + SolutionGuid = {870D888D-5C8B-4057-8722-F73ECF38E513} + EndGlobalSection +EndGlobal diff --git a/dotnetv4/Cognito/README.md b/dotnetv4/Cognito/README.md new file mode 100644 index 00000000000..677b1901dae --- /dev/null +++ b/dotnetv4/Cognito/README.md @@ -0,0 +1,138 @@ +# Amazon Cognito Identity Provider code examples for the SDK for .NET + +## Overview + +Shows how to use the AWS SDK for .NET to work with Amazon Cognito Identity Provider. + + + + +_Amazon Cognito Identity Provider handles user authentication and authorization for your web and mobile apps._ + +## ⚠ Important + +* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/). +* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + + + + +## Code examples + +### Prerequisites + +For prerequisites, see the [README](../README.md#Prerequisites) in the `dotnetv4` folder. + + + +These examples also require the following resources: + +* An existing Amazon Cognito user pool that is configured to allow self sign-up. +* A client ID to use for authenticating with Amazon Cognito. + + +To create these resources, run the AWS CloudFormation script in the +[resources/cdk/cognito_scenario_user_pool_with_mfa](../../resources/cdk/cognito_scenario_user_pool_with_mfa) +folder. This script outputs a user pool ID and a client ID that you can use to run +the scenario. + + +### Single actions + +Code excerpts that show you how to call individual service functions. 
+ +- [AdminGetUser](Actions/CognitoWrapper.cs#L288) +- [AdminInitiateAuth](Actions/CognitoWrapper.cs#L156) +- [AdminRespondToAuthChallenge](Actions/CognitoWrapper.cs#L72) +- [AssociateSoftwareToken](Actions/CognitoWrapper.cs#L133) +- [ConfirmDevice](Actions/CognitoWrapper.cs#L241) +- [ConfirmSignUp](Actions/CognitoWrapper.cs#L213) +- [InitiateAuth](Actions/CognitoWrapper.cs#L184) +- [ListUserPools](Actions/CognitoWrapper.cs#L25) +- [ListUsers](Actions/CognitoWrapper.cs#L46) +- [ResendConfirmationCode](Actions/CognitoWrapper.cs#L264) +- [SignUp](Actions/CognitoWrapper.cs#L311) +- [VerifySoftwareToken](Actions/CognitoWrapper.cs#L111) + +### Scenarios + +Code examples that show you how to accomplish a specific task by calling multiple +functions within the same service. + +- [Sign up a user with a user pool that requires MFA](Actions/CognitoWrapper.cs) + + + + + +## Run the examples + +### Instructions + +For general instructions to run the examples, see the +[README](../README.md#building-and-running-the-code-examples) in the `dotnetv4` folder. + +Some projects might include a settings.json file. Before compiling the project, +you can change these values to match your own account and resources. Alternatively, +add a settings.local.json file with your local settings, which will be loaded automatically +when the application runs. + +After the example compiles, you can run it from the command line. To do so, navigate to +the folder that contains the .csproj file and run the following command: + +``` +dotnet run +``` + +Alternatively, you can run the example from within your IDE. + + + + + + + +#### Sign up a user with a user pool that requires MFA + +This example shows you how to do the following: + +- Sign up and confirm a user with a username, password, and email address. +- Set up multi-factor authentication by associating an MFA application with the user. +- Sign in by using a password and an MFA code. + + + + + + + + +### Tests + +⚠ Running tests might result in charges to your AWS account. + + +To find instructions for running these tests, see the [README](../README.md#Tests) +in the `dotnetv4` folder. + + + + + + +## Additional resources + +- [Amazon Cognito Identity Provider Developer Guide](https://docs.aws.amazon.com/cognito/latest/developerguide/cognito-user-identity-pools.html) +- [Amazon Cognito Identity Provider API Reference](https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/Welcome.html) +- [SDK for .NET Amazon Cognito Identity Provider reference](https://docs.aws.amazon.com/sdkfornet/v3/apidocs/items/CognitoIdentity/NCognitoIdentity.html) + + + + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 \ No newline at end of file diff --git a/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.cs b/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.cs new file mode 100644 index 00000000000..a5418365f5f --- /dev/null +++ b/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.cs @@ -0,0 +1,160 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv4.Main] + +using LogLevel = Microsoft.Extensions.Logging.LogLevel; + +namespace CognitoBasics; + +public static class CognitoBasics +{ + public static bool _interactive = true; + + public static async Task Main(string[] args) + { + // Set up dependency injection for Amazon Cognito. 
+ using var host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => + logging.AddFilter("System", LogLevel.Debug) + .AddFilter("Microsoft", LogLevel.Information) + .AddFilter("Microsoft", LogLevel.Trace)) + .ConfigureServices((_, services) => + services.AddAWSService() + .AddTransient() + ) + .Build(); ; + + var configuration = new ConfigurationBuilder() + .SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("settings.json") // Load settings from .json file. + .AddJsonFile("settings.local.json", + true) // Optionally load local settings. + .Build(); + + var cognitoWrapper = host.Services.GetRequiredService(); + + await RunScenario(cognitoWrapper, configuration); + } + + /// + /// Run the example scenario. + /// + /// Wrapper for service actions. + /// Scenario configuration. + /// + public static async Task RunScenario(CognitoWrapper cognitoWrapper, IConfigurationRoot configuration) + { + Console.WriteLine(new string('-', 80)); + UiMethods.DisplayOverview(); + Console.WriteLine(new string('-', 80)); + + // clientId - The app client Id value that you get from the AWS CDK script. + var clientId = + configuration[ + "ClientId"]; // "*** REPLACE WITH CLIENT ID VALUE FROM CDK SCRIPT"; + + // poolId - The pool Id that you get from the AWS CDK script. + var poolId = + configuration["PoolId"]!; // "*** REPLACE WITH POOL ID VALUE FROM CDK SCRIPT"; + var userName = configuration["UserName"]; + var password = configuration["Password"]; + var email = configuration["Email"]; + + // If the username wasn't set in the configuration file, + // get it from the user now. + if (userName is null) + { + do + { + Console.Write("Username: "); + userName = Console.ReadLine(); + } while (string.IsNullOrEmpty(userName)); + } + + Console.WriteLine($"\nUsername: {userName}"); + + // If the password wasn't set in the configuration file, + // get it from the user now. + if (password is null) + { + do + { + Console.Write("Password: "); + password = Console.ReadLine(); + } while (string.IsNullOrEmpty(password)); + } + + // If the email address wasn't set in the configuration file, + // get it from the user now. + if (email is null) + { + do + { + Console.Write("Email: "); + email = Console.ReadLine(); + } while (string.IsNullOrEmpty(email)); + } + + // Now sign up the user. + Console.WriteLine($"\nSigning up {userName} with email address: {email}"); + await cognitoWrapper.SignUpAsync(clientId, userName, password, email); + + // Add the user to the user pool. + Console.WriteLine($"Adding {userName} to the user pool"); + await cognitoWrapper.GetAdminUserAsync(userName, poolId); + + UiMethods.DisplayTitle("Get confirmation code"); + Console.WriteLine($"Conformation code sent to {userName}."); + + Console.Write("Would you like to send a new code? (Y/N) "); + var answer = _interactive ? Console.ReadLine() : "y"; + + if (answer!.ToLower() == "y") + { + await cognitoWrapper.ResendConfirmationCodeAsync(clientId, userName); + Console.WriteLine("Sending a new confirmation code"); + } + + Console.Write("Enter confirmation code (from Email): "); + var code = _interactive ? 
Console.ReadLine() : "-"; + + await cognitoWrapper.ConfirmSignupAsync(clientId, code, userName); + + + UiMethods.DisplayTitle("Checking status"); + Console.WriteLine($"Rechecking the status of {userName} in the user pool"); + await cognitoWrapper.GetAdminUserAsync(userName, poolId); + + Console.WriteLine($"Setting up authenticator for {userName} in the user pool"); + var setupResponse = await cognitoWrapper.InitiateAuthAsync(clientId, userName, password); + + var setupSession = await cognitoWrapper.AssociateSoftwareTokenAsync(setupResponse.Session); + Console.Write("Enter the 6-digit code displayed in Google Authenticator: "); + var setupCode = _interactive ? Console.ReadLine() : "-"; + var setupResult = + await cognitoWrapper.VerifySoftwareTokenAsync(setupSession, setupCode); + Console.WriteLine($"Setup status: {setupResult}"); + + Console.WriteLine($"Now logging in {userName} in the user pool"); + var authSession = + await cognitoWrapper.AdminInitiateAuthAsync(clientId, poolId, userName, + password); + + Console.Write("Enter a new 6-digit code displayed in Google Authenticator: "); + var authCode = _interactive ? Console.ReadLine() : "-"; + var authResult = + await cognitoWrapper.AdminRespondToAuthChallengeAsync(userName, clientId, + authCode, authSession, poolId); + Console.WriteLine( + $"Authenticated and received access token: {authResult.AccessToken}"); + + + Console.WriteLine(new string('-', 80)); + Console.WriteLine("Cognito scenario is complete."); + Console.WriteLine(new string('-', 80)); + return true; + } +} + +// snippet-end:[Cognito.dotnetv4.Main] \ No newline at end of file diff --git a/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.csproj b/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.csproj new file mode 100644 index 00000000000..fdf7a548655 --- /dev/null +++ b/dotnetv4/Cognito/Scenarios/Cognito_Basics/CognitoBasics.csproj @@ -0,0 +1,29 @@ + + + + Exe + net8.0 + enable + enable + + + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + settings.json + + + + diff --git a/dotnetv4/Cognito/Scenarios/Cognito_Basics/UIMethods.cs b/dotnetv4/Cognito/Scenarios/Cognito_Basics/UIMethods.cs new file mode 100644 index 00000000000..ccc9c967e24 --- /dev/null +++ b/dotnetv4/Cognito/Scenarios/Cognito_Basics/UIMethods.cs @@ -0,0 +1,44 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv4.UIMethods] +namespace CognitoBasics; + +/// +/// Some useful methods to make screen display easier. +/// +public static class UiMethods +{ + /// + /// Show information about the scenario. + /// + public static void DisplayOverview() + { + DisplayTitle("Welcome to the Amazon Cognito Demo"); + + Console.WriteLine("This example application does the following:"); + Console.WriteLine("\t 1. Signs up a user."); + Console.WriteLine("\t 2. Gets the user's confirmation status."); + Console.WriteLine("\t 3. Resends the confirmation code if the user requested another code."); + Console.WriteLine("\t 4. Confirms that the user signed up."); + Console.WriteLine("\t 5. Invokes the initiateAuth to sign in. This results in being prompted to set up TOTP (time-based one-time password). (The response is “ChallengeName”: “MFA_SETUP”)."); + Console.WriteLine("\t 6. Invokes the AssociateSoftwareToken method to generate a TOTP MFA private key. This can be used with Google Authenticator."); + Console.WriteLine("\t 7. 
Invokes the VerifySoftwareToken method to verify the TOTP and register for MFA."); + Console.WriteLine("\t 8. Invokes the AdminInitiateAuth to sign in again. This results in being prompted to submit a TOTP (Response: “ChallengeName”: “SOFTWARE_TOKEN_MFA”)."); + Console.WriteLine("\t 9. Invokes the AdminRespondToAuthChallenge to get back a token."); + } + + /// + /// Display a line of hyphens, the centered text of the title and another + /// line of hyphens. + /// + /// The string to be displayed. + public static void DisplayTitle(string strTitle) + { + Console.WriteLine(); + Console.WriteLine(strTitle); + Console.WriteLine(); + } +} + +// snippet-end:[Cognito.dotnetv4.UIMethods] \ No newline at end of file diff --git a/dotnetv4/Cognito/Scenarios/Cognito_Basics/Usings.cs b/dotnetv4/Cognito/Scenarios/Cognito_Basics/Usings.cs new file mode 100644 index 00000000000..8a06b87643b --- /dev/null +++ b/dotnetv4/Cognito/Scenarios/Cognito_Basics/Usings.cs @@ -0,0 +1,14 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[Cognito.dotnetv4.CognitoBasics.Usings] +global using Amazon.CognitoIdentityProvider; +global using CognitoActions; +global using Microsoft.Extensions.Configuration; +global using Microsoft.Extensions.DependencyInjection; +global using Microsoft.Extensions.Hosting; +global using Microsoft.Extensions.Logging; +global using Microsoft.Extensions.Logging.Console; +global using Microsoft.Extensions.Logging.Debug; + +// snippet-end:[Cognito.dotnetv4.CognitoBasics.Usings] \ No newline at end of file diff --git a/dotnetv4/Cognito/Scenarios/Cognito_Basics/settings.json b/dotnetv4/Cognito/Scenarios/Cognito_Basics/settings.json new file mode 100644 index 00000000000..4bfac53daa4 --- /dev/null +++ b/dotnetv4/Cognito/Scenarios/Cognito_Basics/settings.json @@ -0,0 +1,9 @@ +{ + "ClientId": "client_id_from_cdk", + "PoolId": "client_id_from_cdk", + "UserName": "username", + "Password": "EXAMPLEPASSWORD", + "Email": "useremail", + "adminUserName": "admin", + "adminPassword": "EXAMPLEPASSWORD" +} diff --git a/dotnetv4/Cognito/Tests/CognitoBasicsTests.cs b/dotnetv4/Cognito/Tests/CognitoBasicsTests.cs new file mode 100644 index 00000000000..974973c7b8f --- /dev/null +++ b/dotnetv4/Cognito/Tests/CognitoBasicsTests.cs @@ -0,0 +1,198 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +using System.Net; +using Amazon.CognitoIdentityProvider; +using Amazon.CognitoIdentityProvider.Model; +using Amazon.Runtime; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; +using Moq; + +namespace CognitoWrapperTests; + +/// +/// Tests for the Cognito scenario. +/// +public class CognitoBasicsTests +{ + private ILoggerFactory _loggerFactory = null!; + + [Trait("Category", "Unit")] + [Fact] + public async Task ScenarioTest() + { + // Arrange. 
+ _loggerFactory = LoggerFactory.Create(builder => + { + builder.AddConsole(); + }); + + var mockCognitoService = new Mock(); + + mockCognitoService.Setup(client => client.Paginators.ListUserPools( + It.IsAny())) + .Returns(new TestUserPoolPaginator() as IListUserPoolsPaginator); + + mockCognitoService.Setup(client => client.Paginators.ListUserPools( + It.IsAny())) + .Returns(new TestUserPoolPaginator() as IListUserPoolsPaginator); + + mockCognitoService.Setup(client => client.AdminRespondToAuthChallengeAsync( + It.IsAny(), + It.IsAny())) + .Returns((AdminRespondToAuthChallengeRequest r, + CancellationToken token) => + { + return Task.FromResult(new AdminRespondToAuthChallengeResponse() + { + HttpStatusCode = HttpStatusCode.OK, + AuthenticationResult = new AuthenticationResultType() + }); + }); + + mockCognitoService.Setup(client => client.VerifySoftwareTokenAsync( + It.IsAny(), + It.IsAny())) + .Returns((VerifySoftwareTokenRequest r, + CancellationToken token) => + { + return Task.FromResult(new VerifySoftwareTokenResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + mockCognitoService.Setup(client => client.AssociateSoftwareTokenAsync( + It.IsAny(), + It.IsAny())) + .Returns((AssociateSoftwareTokenRequest r, + CancellationToken token) => + { + return Task.FromResult(new AssociateSoftwareTokenResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + mockCognitoService.Setup(client => client.AdminInitiateAuthAsync( + It.IsAny(), + It.IsAny())) + .Returns((AdminInitiateAuthRequest r, + CancellationToken token) => + { + return Task.FromResult(new AdminInitiateAuthResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + mockCognitoService.Setup(client => client.InitiateAuthAsync( + It.IsAny(), + It.IsAny())) + .Returns((InitiateAuthRequest r, + CancellationToken token) => + { + return Task.FromResult(new InitiateAuthResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + mockCognitoService.Setup(client => client.ConfirmSignUpAsync( + It.IsAny(), + It.IsAny())) + .Returns((ConfirmSignUpRequest r, + CancellationToken token) => + { + return Task.FromResult(new ConfirmSignUpResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + mockCognitoService.Setup(client => client.ResendConfirmationCodeAsync( + It.IsAny(), + It.IsAny())) + .Returns((ResendConfirmationCodeRequest r, + CancellationToken token) => + { + return Task.FromResult(new ResendConfirmationCodeResponse() + { + HttpStatusCode = HttpStatusCode.OK, + CodeDeliveryDetails = new CodeDeliveryDetailsType() + }); + }); + + mockCognitoService.Setup(client => client.AdminGetUserAsync( + It.IsAny(), + It.IsAny())) + .Returns((AdminGetUserRequest r, + CancellationToken token) => + { + return Task.FromResult(new AdminGetUserResponse() + { + HttpStatusCode = HttpStatusCode.OK, + UserStatus = UserStatusType.CONFIRMED + }); + }); + + mockCognitoService.Setup(client => client.SignUpAsync( + It.IsAny(), + It.IsAny())) + .Returns((SignUpRequest r, + CancellationToken token) => + { + return Task.FromResult(new SignUpResponse() + { + HttpStatusCode = HttpStatusCode.OK, + }); + }); + + var configuration = new ConfigurationBuilder() + .SetBasePath(Directory.GetCurrentDirectory()) + .AddJsonFile("testsettings.json") // Load test settings from .json file. + .AddJsonFile("testsettings.local.json", + true) // Optionally load local settings. 
+ .Build(); + + var wrapper = new CognitoWrapper(mockCognitoService.Object); + CognitoBasics.CognitoBasics._interactive = false; + + var success = + await CognitoBasics.CognitoBasics.RunScenario(wrapper, configuration); + Assert.True(success); + } + +} + + +/// +/// Mock Paginator for user pool response. +/// +public class TestUsersPaginator : IPaginator, IListUsersPaginator +{ + public IAsyncEnumerable PaginateAsync( + CancellationToken cancellationToken = new CancellationToken()) + { + throw new NotImplementedException(); + } + + public IPaginatedEnumerable Responses { get; } = null!; + public IPaginatedEnumerable Users { get; } = null!; +} + +/// +/// Mock Paginator for user response. +/// +public class TestUserPoolPaginator : IPaginator, IListUserPoolsPaginator +{ + public IAsyncEnumerable PaginateAsync( + CancellationToken cancellationToken = new CancellationToken()) + { + throw new NotImplementedException(); + } + + public IPaginatedEnumerable Responses { get; } = null!; + public IPaginatedEnumerable UserPools { get; } = null!; +} \ No newline at end of file diff --git a/dotnetv4/Cognito/Tests/CognitoTests.csproj b/dotnetv4/Cognito/Tests/CognitoTests.csproj new file mode 100644 index 00000000000..fb9883ad93d --- /dev/null +++ b/dotnetv4/Cognito/Tests/CognitoTests.csproj @@ -0,0 +1,38 @@ + + + + net8.0 + enable + enable + + false + + + + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + PreserveNewest + + + PreserveNewest + testsettings.json + + + + + + + + + diff --git a/dotnetv4/Cognito/Tests/Usings.cs b/dotnetv4/Cognito/Tests/Usings.cs new file mode 100644 index 00000000000..d77a2d566c5 --- /dev/null +++ b/dotnetv4/Cognito/Tests/Usings.cs @@ -0,0 +1,8 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +global using CognitoActions; +global using Xunit; + +// Optional. 
+[assembly: CollectionBehavior(DisableTestParallelization = true)] \ No newline at end of file diff --git a/dotnetv4/Cognito/Tests/testsettings.json b/dotnetv4/Cognito/Tests/testsettings.json new file mode 100644 index 00000000000..eefdb2c8435 --- /dev/null +++ b/dotnetv4/Cognito/Tests/testsettings.json @@ -0,0 +1,8 @@ +{ + "UserName": "someuser", + "Email": "someone@example.com", + "Password": "AGoodPassword1234", + "UserPoolId": "IDENTIFY_POOL_ID", + "ClientId": "CLIENT_ID_FROM_CDK_SCRIPT", + "PoolId": "USER_POOL_ID_FROM_CDK_SCRIPT" +} diff --git a/dotnetv4/DotNetV4Examples.sln b/dotnetv4/DotNetV4Examples.sln index ab7be69d4d9..d46afcd8c1e 100644 --- a/dotnetv4/DotNetV4Examples.sln +++ b/dotnetv4/DotNetV4Examples.sln @@ -119,6 +119,16 @@ Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Basics", "EC2\Scenarios\EC2 EndProject Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "EC2Actions", "EC2\Actions\EC2Actions.csproj", "{0633CB2B-3508-48E5-A8C2-427A83A5CA6E}" EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Cognito", "Cognito", "{F5214562-85F4-4FD8-B56D-C5D8E7914901}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CognitoTests", "Cognito\Tests\CognitoTests.csproj", "{63DC05A0-5B16-45A4-BDE5-90DD2E200507}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scenarios", "Scenarios", "{D38A409C-EE40-4E70-B500-F3D6EF8E82A4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CognitoBasics", "Cognito\Scenarios\Cognito_Basics\CognitoBasics.csproj", "{38C8C3B0-163D-4B7B-86A2-3EFFBC165E99}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "CognitoActions", "Cognito\Actions\CognitoActions.csproj", "{1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1}" +EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution Debug|Any CPU = Debug|Any CPU @@ -293,6 +303,18 @@ Global {0633CB2B-3508-48E5-A8C2-427A83A5CA6E}.Debug|Any CPU.Build.0 = Debug|Any CPU {0633CB2B-3508-48E5-A8C2-427A83A5CA6E}.Release|Any CPU.ActiveCfg = Release|Any CPU {0633CB2B-3508-48E5-A8C2-427A83A5CA6E}.Release|Any CPU.Build.0 = Release|Any CPU + {63DC05A0-5B16-45A4-BDE5-90DD2E200507}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {63DC05A0-5B16-45A4-BDE5-90DD2E200507}.Debug|Any CPU.Build.0 = Debug|Any CPU + {63DC05A0-5B16-45A4-BDE5-90DD2E200507}.Release|Any CPU.ActiveCfg = Release|Any CPU + {63DC05A0-5B16-45A4-BDE5-90DD2E200507}.Release|Any CPU.Build.0 = Release|Any CPU + {38C8C3B0-163D-4B7B-86A2-3EFFBC165E99}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {38C8C3B0-163D-4B7B-86A2-3EFFBC165E99}.Debug|Any CPU.Build.0 = Debug|Any CPU + {38C8C3B0-163D-4B7B-86A2-3EFFBC165E99}.Release|Any CPU.ActiveCfg = Release|Any CPU + {38C8C3B0-163D-4B7B-86A2-3EFFBC165E99}.Release|Any CPU.Build.0 = Release|Any CPU + {1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -349,6 +371,10 @@ Global {6C167F25-F97F-4854-8CD8-A2D446B6799B} = {9424FB14-B6DE-44CE-B675-AC2B57EC1E69} {D95519CA-BD27-45AE-B83B-3FB02E7AE445} = {6C167F25-F97F-4854-8CD8-A2D446B6799B} {0633CB2B-3508-48E5-A8C2-427A83A5CA6E} = {9424FB14-B6DE-44CE-B675-AC2B57EC1E69} + {63DC05A0-5B16-45A4-BDE5-90DD2E200507} = {F5214562-85F4-4FD8-B56D-C5D8E7914901} + 
{D38A409C-EE40-4E70-B500-F3D6EF8E82A4} = {F5214562-85F4-4FD8-B56D-C5D8E7914901} + {38C8C3B0-163D-4B7B-86A2-3EFFBC165E99} = {D38A409C-EE40-4E70-B500-F3D6EF8E82A4} + {1AF980DF-DEEA-4E5D-9001-6EC67EB96AD1} = {F5214562-85F4-4FD8-B56D-C5D8E7914901} EndGlobalSection GlobalSection(ExtensibilityGlobals) = postSolution SolutionGuid = {08502818-E8E1-4A91-A51C-4C8C8D4FF9CA} diff --git a/gov2/redshift/README.md b/gov2/redshift/README.md index 28b7417b7ee..95df3003dfa 100644 --- a/gov2/redshift/README.md +++ b/gov2/redshift/README.md @@ -80,8 +80,15 @@ go run ./cmd -h ``` #### Learn the basics -This example shows you how to learn core operations for Amazon Redshift using an AWS SDK. +This example shows you how to do the following: +- Create a Redshift cluster. +- List databases in the cluster. +- Create a table named Movies. +- Populate the Movies table. +- Query the Movies table by year. +- Modify the Redshift cluster. +- Delete the Amazon Redshift cluster. @@ -117,4 +124,4 @@ in the `gov2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javascript/example_code/cognito/lambda-trigger-pre-sign-up-auto-confirm.js b/javascript/example_code/cognito/lambda-trigger-pre-sign-up-auto-confirm.js index 0e6881e3856..7c04c357d7f 100644 --- a/javascript/example_code/cognito/lambda-trigger-pre-sign-up-auto-confirm.js +++ b/javascript/example_code/cognito/lambda-trigger-pre-sign-up-auto-confirm.js @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 // snippet-start:[cognito.javascript.lambda-trigger.pre-sign-up-auto-confirm] -exports.handler = (event, context, callback) => { +export const handler = async (event, context, callback) => { // Set the user pool autoConfirmUser flag after validating the email domain event.response.autoConfirmUser = false; diff --git a/javascriptv3/example_code/bedrock-agent-runtime/package.json b/javascriptv3/example_code/bedrock-agent-runtime/package.json index 44a3a43bb4a..ec65b348886 100644 --- a/javascriptv3/example_code/bedrock-agent-runtime/package.json +++ b/javascriptv3/example_code/bedrock-agent-runtime/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/bedrock-agent-runtime-test-results.junit.xml" }, "dependencies": { "@aws-sdk/client-bedrock-agent-runtime": "^3.675.0" diff --git a/javascriptv3/example_code/bedrock-agent/package.json b/javascriptv3/example_code/bedrock-agent/package.json index d3280ea23f3..9e4a6950faa 100644 --- a/javascriptv3/example_code/bedrock-agent/package.json +++ b/javascriptv3/example_code/bedrock-agent/package.json @@ -5,8 +5,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/bedrock-agent-test-results.junit.xml" }, "dependencies": { "@aws-sdk/client-bedrock-agent": "^3.515.0" diff --git a/javascriptv3/example_code/bedrock-runtime/.gitignore b/javascriptv3/example_code/bedrock-runtime/.gitignore index e90ea2eff59..0d1d9b21219 100644 --- a/javascriptv3/example_code/bedrock-runtime/.gitignore +++ b/javascriptv3/example_code/bedrock-runtime/.gitignore @@ -1 +1,2 @@ /tempx/ +/output/ 
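The `lambda-trigger-pre-sign-up-auto-confirm.js` hunk above only shows the handler signature moving to an ES module async export; its comment says the flag is set "after validating the email domain," but the validation itself is not part of the diff. A minimal sketch of such a pre sign-up trigger is shown below; the `example.com` domain check is an assumption for illustration, not part of the original example.

```javascript
// Pre sign-up Lambda trigger: auto-confirm users whose email belongs to a
// trusted domain. The domain check below is illustrative only.
export const handler = async (event) => {
  // Default: require the normal confirmation flow.
  event.response.autoConfirmUser = false;

  const email = event.request.userAttributes.email ?? "";
  if (email.endsWith("@example.com")) {
    // Auto-confirm the user and mark the email address as verified.
    event.response.autoConfirmUser = true;
    event.response.autoVerifyEmail = true;
  }

  // Returning the event passes the (possibly modified) response back to Amazon Cognito.
  return event;
};
```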
diff --git a/javascriptv3/example_code/bedrock-runtime/README.md b/javascriptv3/example_code/bedrock-runtime/README.md index beb3f1cef10..078d3512168 100644 --- a/javascriptv3/example_code/bedrock-runtime/README.md +++ b/javascriptv3/example_code/bedrock-runtime/README.md @@ -50,6 +50,15 @@ functions within the same service. - [Converse](models/ai21LabsJurassic2/converse.js#L4) - [InvokeModel](models/ai21LabsJurassic2/invoke_model.js) +### Amazon Nova + +- [Converse](models/amazonTitanText/converse.js#L4) +- [ConverseStream](models/mistral/converse.js#L4) + +### Amazon Nova Canvas + +- [InvokeModel](models/amazonNovaCanvas/invokeModel.js#L4) + ### Amazon Titan Text - [Converse](models/amazonTitanText/converse.js#L4) diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonNovaCanvas/invokeModel.js b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaCanvas/invokeModel.js new file mode 100644 index 00000000000..897ff67dc97 --- /dev/null +++ b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaCanvas/invokeModel.js @@ -0,0 +1,93 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[javascript.v3.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration] + +import { + BedrockRuntimeClient, + InvokeModelCommand, +} from "@aws-sdk/client-bedrock-runtime"; +import { saveImage } from "../../utils/image-creation.js"; +import { fileURLToPath } from "node:url"; + +/** + * This example demonstrates how to use Amazon Nova Canvas to generate images. + * It shows how to: + * - Set up the Amazon Bedrock runtime client + * - Configure the image generation parameters + * - Send a request to generate an image + * - Process the response and handle the generated image + * + * @returns {Promise} Base64-encoded image data + */ +export const invokeModel = async () => { + // Step 1: Create the Amazon Bedrock runtime client + // Credentials will be automatically loaded from the environment + const client = new BedrockRuntimeClient({ region: "us-east-1" }); + + // Step 2: Specify which model to use + // For the latest available models, see: + // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html + const modelId = "amazon.nova-canvas-v1:0"; + + // Step 3: Configure the request payload + // First, set the main parameters: + // - prompt: Text description of the image to generate + // - seed: Random number for reproducible generation (0 to 858,993,459) + const prompt = "A stylized picture of a cute old steampunk robot"; + const seed = Math.floor(Math.random() * 858993460); + + // Then, create the payload using the following structure: + // - taskType: TEXT_IMAGE (specifies text-to-image generation) + // - textToImageParams: Contains the text prompt + // - imageGenerationConfig: Contains optional generation settings (seed, quality, etc.) 
+ // For a list of available request parameters, see: + // https://docs.aws.amazon.com/nova/latest/userguide/image-gen-req-resp-structure.html + const payload = { + taskType: "TEXT_IMAGE", + textToImageParams: { + text: prompt, + }, + imageGenerationConfig: { + seed, + quality: "standard", + }, + }; + + // Step 4: Send and process the request + // - Embed the payload in a request object + // - Send the request to the model + // - Extract and return the generated image data from the response + try { + const request = { + modelId, + body: JSON.stringify(payload), + }; + const response = await client.send(new InvokeModelCommand(request)); + + const decodedResponseBody = new TextDecoder().decode(response.body); + // The response includes an array of base64-encoded PNG images + /** @type {{images: string[]}} */ + const responseBody = JSON.parse(decodedResponseBody); + return responseBody.images[0]; // Base64-encoded image data + } catch (error) { + console.error(`ERROR: Can't invoke '${modelId}'. Reason: ${error.message}`); + throw error; + } +}; + +// If run directly, execute the example and save the generated image +if (process.argv[1] === fileURLToPath(import.meta.url)) { + console.log("Generating image. This may take a few seconds..."); + invokeModel() + .then(async (imageData) => { + const imagePath = await saveImage(imageData, "nova-canvas"); + // Example path: javascriptv3/example_code/bedrock-runtime/output/nova-canvas/image-01.png + console.log(`Image saved to: ${imagePath}`); + }) + .catch((error) => { + console.error("Execution failed:", error); + process.exitCode = 1; + }); +} +// snippet-end:[javascript.v3.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration] diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converse.js b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converse.js new file mode 100644 index 00000000000..23c8d17dd45 --- /dev/null +++ b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converse.js @@ -0,0 +1,68 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[javascript.v3.bedrock-runtime.Converse_AmazonNovaText] +// This example demonstrates how to use the Amazon Nova foundation models to generate text. 
+// It shows how to: +// - Set up the Amazon Bedrock runtime client +// - Create a message +// - Configure and send a request +// - Process the response + +import { + BedrockRuntimeClient, + ConversationRole, + ConverseCommand, +} from "@aws-sdk/client-bedrock-runtime"; + +// Step 1: Create the Amazon Bedrock runtime client +// Credentials will be automatically loaded from the environment +const client = new BedrockRuntimeClient({ region: "us-east-1" }); + +// Step 2: Specify which model to use: +// Available Amazon Nova models and their characteristics: +// - Amazon Nova Micro: Text-only model optimized for lowest latency and cost +// - Amazon Nova Lite: Fast, low-cost multimodal model for image, video, and text +// - Amazon Nova Pro: Advanced multimodal model balancing accuracy, speed, and cost +// +// For the most current model IDs, see: +// https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html +const modelId = "amazon.nova-lite-v1:0"; + +// Step 3: Create the message +// The message includes the text prompt and specifies that it comes from the user +const inputText = + "Describe the purpose of a 'hello world' program in one line."; +const message = { + content: [{ text: inputText }], + role: ConversationRole.USER, +}; + +// Step 4: Configure the request +// Optional parameters to control the model's response: +// - maxTokens: maximum number of tokens to generate +// - temperature: randomness (max: 1.0, default: 0.7) +// OR +// - topP: diversity of word choice (max: 1.0, default: 0.9) +// Note: Use either temperature OR topP, but not both +const request = { + modelId, + messages: [message], + inferenceConfig: { + maxTokens: 500, // The maximum response length + temperature: 0.5, // Using temperature for randomness control + //topP: 0.9, // Alternative: use topP instead of temperature + }, +}; + +// Step 5: Send and process the request +// - Send the request to the model +// - Extract and return the generated text from the response +try { + const response = await client.send(new ConverseCommand(request)); + console.log(response.output.message.content[0].text); +} catch (error) { + console.error(`ERROR: Can't invoke '${modelId}'. Reason: ${error.message}`); + throw error; +} +// snippet-end:[javascript.v3.bedrock-runtime.Converse_AmazonNovaText] diff --git a/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converseStream.js b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converseStream.js new file mode 100644 index 00000000000..5941c783f37 --- /dev/null +++ b/javascriptv3/example_code/bedrock-runtime/models/amazonNovaText/converseStream.js @@ -0,0 +1,75 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +// snippet-start:[javascript.v3.bedrock-runtime.ConverseStream_AmazonNovaText] +// This example demonstrates how to use the Amazon Nova foundation models +// to generate streaming text responses. 
+// It shows how to: +// - Set up the Amazon Bedrock runtime client +// - Create a message +// - Configure a streaming request +// - Process the streaming response + +import { + BedrockRuntimeClient, + ConversationRole, + ConverseStreamCommand, +} from "@aws-sdk/client-bedrock-runtime"; + +// Step 1: Create the Amazon Bedrock runtime client +// Credentials will be automatically loaded from the environment +const client = new BedrockRuntimeClient({ region: "us-east-1" }); + +// Step 2: Specify which model to use +// Available Amazon Nova models and their characteristics: +// - Amazon Nova Micro: Text-only model optimized for lowest latency and cost +// - Amazon Nova Lite: Fast, low-cost multimodal model for image, video, and text +// - Amazon Nova Pro: Advanced multimodal model balancing accuracy, speed, and cost +// +// For the most current model IDs, see: +// https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html +const modelId = "amazon.nova-lite-v1:0"; + +// Step 3: Create the message +// The message includes the text prompt and specifies that it comes from the user +const inputText = + "Describe the purpose of a 'hello world' program in one paragraph"; +const message = { + content: [{ text: inputText }], + role: ConversationRole.USER, +}; + +// Step 4: Configure the streaming request +// Optional parameters to control the model's response: +// - maxTokens: maximum number of tokens to generate +// - temperature: randomness (max: 1.0, default: 0.7) +// OR +// - topP: diversity of word choice (max: 1.0, default: 0.9) +// Note: Use either temperature OR topP, but not both +const request = { + modelId, + messages: [message], + inferenceConfig: { + maxTokens: 500, // The maximum response length + temperature: 0.5, // Using temperature for randomness control + //topP: 0.9, // Alternative: use topP instead of temperature + }, +}; + +// Step 5: Send and process the streaming request +// - Send the request to the model +// - Process each chunk of the streaming response +try { + const response = await client.send(new ConverseStreamCommand(request)); + + for await (const chunk of response.stream) { + if (chunk.contentBlockDelta) { + // Print each text chunk as it arrives + process.stdout.write(chunk.contentBlockDelta.delta?.text || ""); + } + } +} catch (error) { + console.error(`ERROR: Can't invoke '${modelId}'. 
Reason: ${error.message}`); + process.exitCode = 1; +} +// snippet-end:[javascript.v3.bedrock-runtime.ConverseStream_AmazonNovaText] diff --git a/javascriptv3/example_code/bedrock-runtime/package.json b/javascriptv3/example_code/bedrock-runtime/package.json index 25e81ad8de2..13b0a0d8350 100644 --- a/javascriptv3/example_code/bedrock-runtime/package.json +++ b/javascriptv3/example_code/bedrock-runtime/package.json @@ -5,12 +5,12 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/bedrock-runtime-test-results.junit.xml" }, "devDependencies": { - "vitest": "^1.6.0" + "vitest": "^1.6.1" }, "dependencies": { - "@aws-sdk/client-bedrock-runtime": "^3.658.1" + "@aws-sdk/client-bedrock-runtime": "^3.751.0" } } diff --git a/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js index db5ac65d7c6..49cf7e73591 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/converse.integration.test.js @@ -8,18 +8,19 @@ describe("Converse with text generation models", () => { const baseDirectory = path.join(__dirname, "..", "models"); const fileName = "converse.js"; - const subdirectories = [ - "ai21LabsJurassic2", - "amazonTitanText", - "anthropicClaude", - "cohereCommand", - "metaLlama", - "mistral", - ]; + const models = { + ai21LabsJurassic2: "AI21 Labs Jurassic-2", + amazonNovaText: "Amazon Nova", + amazonTitanText: "Amazon Titan", + anthropicClaude: "Anthropic Claude", + cohereCommand: "Cohere Command", + metaLlama: "Meta Llama", + mistral: "Mistral", + }; - test.each(subdirectories)( - "should invoke the model and return text", - async (subdirectory) => { + test.each(Object.entries(models).map(([sub, name]) => [name, sub]))( + "should invoke %s and return text", + async (_, subdirectory) => { const script = path.join(baseDirectory, subdirectory, fileName); const consoleLogSpy = vi.spyOn(console, "log"); diff --git a/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js index 64d964cccd0..916e976e803 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/converse_stream.integration.test.js @@ -9,17 +9,18 @@ describe("ConverseStream with text generation models", () => { const fileName = "converseStream.js"; const baseDirectory = path.join(__dirname, "..", "models"); - const subdirectories = [ - "amazonTitanText", - "anthropicClaude", - "cohereCommand", - "metaLlama", - "mistral", - ]; + const models = { + amazonNovaText: "Amazon Nova", + amazonTitanText: "Amazon Titan", + anthropicClaude: "Anthropic Claude", + cohereCommand: "Cohere Command", + metaLlama: "Meta Llama", + mistral: "Mistral", + }; - test.each(subdirectories)( - "should invoke the model and return text", - async (subdirectory) => { + test.each(Object.entries(models).map(([sub, name]) => [name, sub]))( + "should invoke %s and return text", + async (_, subdirectory) => { let output = ""; const outputStream = new Writable({ write(/** @type string */ chunk, encoding, callback) { diff --git 
a/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js b/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js new file mode 100644 index 00000000000..fccb5495126 --- /dev/null +++ b/javascriptv3/example_code/bedrock-runtime/tests/image_generation.integration.test.js @@ -0,0 +1,13 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { invokeModel } from "../models/amazonNovaCanvas/invokeModel.js"; +import { expectToBeANonEmptyString } from "./test_tools.js"; + +describe("Invoking Amazon Nova Canvas", () => { + it("should return a response", async () => { + const response = await invokeModel(); + expectToBeANonEmptyString(response); + }); +}); diff --git a/javascriptv3/example_code/bedrock-runtime/tests/test_tools.js b/javascriptv3/example_code/bedrock-runtime/tests/test_tools.js index 7c12f2de8d2..5922dc95386 100644 --- a/javascriptv3/example_code/bedrock-runtime/tests/test_tools.js +++ b/javascriptv3/example_code/bedrock-runtime/tests/test_tools.js @@ -10,5 +10,5 @@ import { expect } from "vitest"; */ export const expectToBeANonEmptyString = (string) => { expect(typeof string).toBe("string"); - expect(string.length).not.toBe(0); + expect(string).not.toHaveLength(0); }; diff --git a/javascriptv3/example_code/bedrock-runtime/utils/image-creation.js b/javascriptv3/example_code/bedrock-runtime/utils/image-creation.js new file mode 100644 index 00000000000..2c3ae971e54 --- /dev/null +++ b/javascriptv3/example_code/bedrock-runtime/utils/image-creation.js @@ -0,0 +1,54 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { mkdir, readdir, writeFile } from "node:fs/promises"; +import { dirname, join } from "node:path"; +import { fileURLToPath } from "node:url"; + +/** + * Creates the output directory if it doesn't exist and gets the next available image number + * @param {string} outputDir - The directory path where images will be saved + * @returns {Promise} The next available image number + */ +async function prepareOutputDirectory(outputDir) { + try { + await mkdir(outputDir, { recursive: true }); + const files = await readdir(outputDir); + + // Find the highest existing image number + const numbers = files + .filter((file) => file.match(/^image-\d+\.png$/)) + .map((file) => Number.parseInt(file.match(/^image-(\d+)\.png$/)[1])); + + return numbers.length > 0 ? 
Math.max(...numbers) + 1 : 1; + } catch (error) { + console.error(`Error preparing output directory: ${error.message}`); + throw error; + } +} + +/** + * Saves an image to the output directory with automatic numbering + * @param {string} imageData - Base64-encoded image data + * @param {string} modelName - Name of the model used to generate the image + * @returns {Promise} The full path where the image was saved + */ +export async function saveImage(imageData, modelName) { + // Set up the output directory path relative to this utility script + const utilityDir = dirname(fileURLToPath(import.meta.url)); + const outputDir = join(utilityDir, "..", "output", modelName); + + // Get the next available image number + const imageNumber = await prepareOutputDirectory(outputDir); + + // Create the image filename with padded number + const paddedNumber = imageNumber.toString().padStart(2, "0"); + const filename = `image-${paddedNumber}.png`; + const fullPath = join(outputDir, filename); + + // Save the image + const buffer = Buffer.from(imageData, "base64"); + await writeFile(fullPath, buffer); + + return fullPath; +} diff --git a/javascriptv3/example_code/bedrock/package.json b/javascriptv3/example_code/bedrock/package.json index 21ec6fdb75d..9ca3dc2f322 100644 --- a/javascriptv3/example_code/bedrock/package.json +++ b/javascriptv3/example_code/bedrock/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/bedrock-test-results.junit.xml" }, "dependencies": { "@aws-sdk/client-bedrock": "^3.485.0" diff --git a/javascriptv3/example_code/cloudwatch-events/package.json b/javascriptv3/example_code/cloudwatch-events/package.json index 9e500762b11..ff3c03fa6c4 100644 --- a/javascriptv3/example_code/cloudwatch-events/package.json +++ b/javascriptv3/example_code/cloudwatch-events/package.json @@ -11,7 +11,7 @@ }, "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/cloudwatchevents-test-results.junit.xml" }, "devDependencies": { "vitest": "^1.6.0" diff --git a/javascriptv3/example_code/cloudwatch-logs/package.json b/javascriptv3/example_code/cloudwatch-logs/package.json index 0c529bb1821..3ec85489167 100644 --- a/javascriptv3/example_code/cloudwatch-logs/package.json +++ b/javascriptv3/example_code/cloudwatch-logs/package.json @@ -11,8 +11,8 @@ "@aws-sdk/client-lambda": "^3.216.0" }, "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml", - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/cloudwatchlogs-test-results.junit.xml" }, "devDependencies": { "vitest": "^1.6.0" diff --git a/javascriptv3/example_code/cloudwatch/package.json b/javascriptv3/example_code/cloudwatch/package.json index 3466ca5e2a5..43152195365 100644 --- a/javascriptv3/example_code/cloudwatch/package.json +++ b/javascriptv3/example_code/cloudwatch/package.json @@ -10,7 +10,7 @@ "@aws-sdk/client-ec2": "^3.213.0" }, "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit 
--outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/cloudwatch-test-results.junit.xml" }, "devDependencies": { "uuid": "^9.0.0", diff --git a/javascriptv3/example_code/codebuild/package.json b/javascriptv3/example_code/codebuild/package.json index de3b34e3043..68e31086a7d 100644 --- a/javascriptv3/example_code/codebuild/package.json +++ b/javascriptv3/example_code/codebuild/package.json @@ -9,7 +9,7 @@ }, "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/codebuild-test-results.junit.xml" }, "devDependencies": { "@aws-sdk/client-iam": "^3.391.0", diff --git a/javascriptv3/example_code/codecommit/package.json b/javascriptv3/example_code/codecommit/package.json index 02cebf4a042..fc12459865e 100644 --- a/javascriptv3/example_code/codecommit/package.json +++ b/javascriptv3/example_code/codecommit/package.json @@ -5,7 +5,8 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/codecommit-test-results.junit.xml" }, "dependencies": { "@aws-sdk/client-codecommit": "^3.427.0" diff --git a/javascriptv3/example_code/cognito-identity-provider/package.json b/javascriptv3/example_code/cognito-identity-provider/package.json index 0b581ecf36f..f3c8928c5be 100644 --- a/javascriptv3/example_code/cognito-identity-provider/package.json +++ b/javascriptv3/example_code/cognito-identity-provider/package.json @@ -7,8 +7,8 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/cognito-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/cross-services/aurora-serverless-app/package.json b/javascriptv3/example_code/cross-services/aurora-serverless-app/package.json index 2704310637c..085cfa4eff2 100644 --- a/javascriptv3/example_code/cross-services/aurora-serverless-app/package.json +++ b/javascriptv3/example_code/cross-services/aurora-serverless-app/package.json @@ -5,7 +5,7 @@ "type": "module", "main": "build/index.js", "scripts": { - "test": "vitest run **/*.unit.test.ts", + "test": "vitest run unit", "start": "node ./watch.js" }, "author": "corepyle@amazon.com", diff --git a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/AnalyzeSentiment/package.json b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/AnalyzeSentiment/package.json index 172f8e9f1cc..047a6923641 100644 --- a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/AnalyzeSentiment/package.json +++ b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/AnalyzeSentiment/package.json @@ -5,7 +5,7 @@ "main": "index.js", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", + "test": "vitest run unit", "build": "rollup -c" }, "author": "Corey Pyle ", diff --git a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/ExtractText/package.json 
b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/ExtractText/package.json index 791fa7de51e..988a7bc54a1 100644 --- a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/ExtractText/package.json +++ b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/ExtractText/package.json @@ -5,7 +5,7 @@ "main": "index.js", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", + "test": "vitest run unit", "build": "rollup -c" }, "author": "Corey Pyle ", diff --git a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/SynthesizeAudio/package.json b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/SynthesizeAudio/package.json index b2b992fd2fb..24373853a16 100644 --- a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/SynthesizeAudio/package.json +++ b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/SynthesizeAudio/package.json @@ -5,7 +5,7 @@ "main": "index.js", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", + "test": "vitest run unit", "build": "rollup -c" }, "author": "Corey Pyle ", diff --git a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/TranslateText/package.json b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/TranslateText/package.json index db59ed6f82a..61d44f844c4 100644 --- a/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/TranslateText/package.json +++ b/javascriptv3/example_code/cross-services/feedback-sentiment-analyzer/TranslateText/package.json @@ -5,7 +5,7 @@ "main": "index.js", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", + "test": "vitest run unit", "build": "rollup -c" }, "author": "Corey Pyle ", diff --git a/javascriptv3/example_code/cross-services/photo-asset-manager/package.json b/javascriptv3/example_code/cross-services/photo-asset-manager/package.json index 22191ba173e..3ac3a52ea67 100644 --- a/javascriptv3/example_code/cross-services/photo-asset-manager/package.json +++ b/javascriptv3/example_code/cross-services/photo-asset-manager/package.json @@ -6,7 +6,7 @@ "main": "index.js", "scripts": { "build": "rollup -c", - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/cross-services/wkflw-pools-triggers/cdk/package.json b/javascriptv3/example_code/cross-services/wkflw-pools-triggers/cdk/package.json index 1811921dfff..af2363eab58 100644 --- a/javascriptv3/example_code/cross-services/wkflw-pools-triggers/cdk/package.json +++ b/javascriptv3/example_code/cross-services/wkflw-pools-triggers/cdk/package.json @@ -7,7 +7,7 @@ "scripts": { "build": "tsc", "watch": "tsc -w", - "test": "vitest run **/*.unit.test.ts", + "test": "vitest run unit", "cdk": "cdk" }, "devDependencies": { diff --git a/javascriptv3/example_code/cross-services/wkflw-pools-triggers/package.json b/javascriptv3/example_code/cross-services/wkflw-pools-triggers/package.json index 9b3196d9b06..eacfff7e5b9 100644 --- a/javascriptv3/example_code/cross-services/wkflw-pools-triggers/package.json +++ b/javascriptv3/example_code/cross-services/wkflw-pools-triggers/package.json @@ -6,7 +6,7 @@ "type": "module", "scripts": { "test": "npm run cdk-test", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml", + "integration-test": "vitest run integration --reporter=junit 
--outputFile=test_results/wkflw-pools-triggers-test-results.junit.xml", "cdk-test": "npm run test --prefix ./cdk" }, "engines": { diff --git a/javascriptv3/example_code/cross-services/wkflw-resilient-service/package.json b/javascriptv3/example_code/cross-services/wkflw-resilient-service/package.json index a5e6f99b238..3dd601fa0d6 100644 --- a/javascriptv3/example_code/cross-services/wkflw-resilient-service/package.json +++ b/javascriptv3/example_code/cross-services/wkflw-resilient-service/package.json @@ -6,7 +6,7 @@ "author": "Corey Pyle ", "license": "Apache-2.0", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/wkflw-resilient-service-test-results.junit.xml" }, "dependencies": { "@aws-sdk/client-auto-scaling": "^3.438.0", diff --git a/javascriptv3/example_code/cross-services/wkflw-topics-queues/package.json b/javascriptv3/example_code/cross-services/wkflw-topics-queues/package.json index 1cec553bc24..02579945eac 100644 --- a/javascriptv3/example_code/cross-services/wkflw-topics-queues/package.json +++ b/javascriptv3/example_code/cross-services/wkflw-topics-queues/package.json @@ -4,7 +4,7 @@ "description": "", "main": "index.js", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/dynamodb/actions/create-table.js b/javascriptv3/example_code/dynamodb/actions/create-table.js index 925c0103b02..9f2d22b698a 100644 --- a/javascriptv3/example_code/dynamodb/actions/create-table.js +++ b/javascriptv3/example_code/dynamodb/actions/create-table.js @@ -26,10 +26,7 @@ export const main = async () => { KeyType: "HASH", }, ], - ProvisionedThroughput: { - ReadCapacityUnits: 1, - WriteCapacityUnits: 1, - }, + BillingMode: "PAY_PER_REQUEST", }); const response = await client.send(command); diff --git a/javascriptv3/example_code/dynamodb/package.json b/javascriptv3/example_code/dynamodb/package.json index b2240caf2e5..dcd2362269e 100644 --- a/javascriptv3/example_code/dynamodb/package.json +++ b/javascriptv3/example_code/dynamodb/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/dynamodb-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/dynamodb/tests/delete-table.integration.test.js b/javascriptv3/example_code/dynamodb/tests/delete-table.integration.test.js index 8e0418081e2..1ca23a54e71 100644 --- a/javascriptv3/example_code/dynamodb/tests/delete-table.integration.test.js +++ b/javascriptv3/example_code/dynamodb/tests/delete-table.integration.test.js @@ -28,10 +28,7 @@ describe("delete-table", () => { KeyType: "HASH", }, ], - ProvisionedThroughput: { - ReadCapacityUnits: 1, - WriteCapacityUnits: 1, - }, + BillingMode: "PAY_PER_REQUEST", }); await client.send(createTableCommand); diff --git a/javascriptv3/example_code/ec2/package.json b/javascriptv3/example_code/ec2/package.json index 1a4c43b968c..76afded06b4 100644 --- a/javascriptv3/example_code/ec2/package.json +++ b/javascriptv3/example_code/ec2/package.json @@ -5,7 +5,7 @@ "license": "Apache 2.0", "type": "module", "scripts": 
{ - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/elastic-load-balancing-v2/package.json b/javascriptv3/example_code/elastic-load-balancing-v2/package.json index 08d56c7b16a..6f26b154582 100644 --- a/javascriptv3/example_code/elastic-load-balancing-v2/package.json +++ b/javascriptv3/example_code/elastic-load-balancing-v2/package.json @@ -3,7 +3,7 @@ "version": "1.0.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/elastic-load-balancing-test-results.junit.xml" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/eventbridge/package.json b/javascriptv3/example_code/eventbridge/package.json index 6c7d9736f00..b6a3bc3a70b 100644 --- a/javascriptv3/example_code/eventbridge/package.json +++ b/javascriptv3/example_code/eventbridge/package.json @@ -4,7 +4,7 @@ "author": "Corey Pyle ", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/eventbridge-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/glue/package.json b/javascriptv3/example_code/glue/package.json index b771b70b889..1dd662954b1 100644 --- a/javascriptv3/example_code/glue/package.json +++ b/javascriptv3/example_code/glue/package.json @@ -6,8 +6,8 @@ "author": "Corey Pyle ", "license": "Apache-2.0", "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/glue-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.1", diff --git a/javascriptv3/example_code/iam/package.json b/javascriptv3/example_code/iam/package.json index 067e6c55a01..03416d54973 100644 --- a/javascriptv3/example_code/iam/package.json +++ b/javascriptv3/example_code/iam/package.json @@ -5,7 +5,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/iam-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/iotsitewise/README.md b/javascriptv3/example_code/iotsitewise/README.md index c4e88a366b7..f48868b20ca 100644 --- a/javascriptv3/example_code/iotsitewise/README.md +++ b/javascriptv3/example_code/iotsitewise/README.md @@ -107,8 +107,17 @@ node ./hello.js #### Learn the basics -This example shows you how to learn core operations for AWS IoT SiteWise using an AWS SDK. - +This example shows you how to do the following: + +- Create an AWS IoT SiteWise Asset Model. +- Create an AWS IoT SiteWise Asset. +- Retrieve the property ID values. +- Send data to an AWS IoT SiteWise Asset. +- Retrieve the value of the AWS IoT SiteWise Asset property. +- Create an AWS IoT SiteWise Portal. 
+- Create an AWS IoT SiteWise Gateway. +- Describe the AWS IoT SiteWise Gateway. +- Delete the AWS IoT SiteWise Assets. diff --git a/javascriptv3/example_code/iotsitewise/package.json b/javascriptv3/example_code/iotsitewise/package.json index 15f618aac7c..2b89b43a002 100644 --- a/javascriptv3/example_code/iotsitewise/package.json +++ b/javascriptv3/example_code/iotsitewise/package.json @@ -6,8 +6,8 @@ "test": "tests" }, "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/sitewise-test-results.junit.xml" }, "author": "beqqrry@amazon.com", "license": "ISC", diff --git a/javascriptv3/example_code/kinesis/package.json b/javascriptv3/example_code/kinesis/package.json index f270994479a..2f69750ed40 100644 --- a/javascriptv3/example_code/kinesis/package.json +++ b/javascriptv3/example_code/kinesis/package.json @@ -5,7 +5,7 @@ "test": "tests" }, "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/kinesis-test-results.junit.xml" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/lambda/package.json b/javascriptv3/example_code/lambda/package.json index d93a590f7a0..1e67faa3bad 100644 --- a/javascriptv3/example_code/lambda/package.json +++ b/javascriptv3/example_code/lambda/package.json @@ -7,8 +7,8 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/lambda-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/libs/package.json b/javascriptv3/example_code/libs/package.json index ab8ea4369e7..5700112fffb 100644 --- a/javascriptv3/example_code/libs/package.json +++ b/javascriptv3/example_code/libs/package.json @@ -6,7 +6,7 @@ "license": "Apache-2.0", "type": "module", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "dependencies": { "@aws-sdk/client-cloudformation": "^3.637.0", diff --git a/javascriptv3/example_code/medical-imaging/package.json b/javascriptv3/example_code/medical-imaging/package.json index 72e664b221f..004b1f67a3e 100644 --- a/javascriptv3/example_code/medical-imaging/package.json +++ b/javascriptv3/example_code/medical-imaging/package.json @@ -10,8 +10,8 @@ "@aws-sdk/client-sts": "^3.620.0" }, "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/medical-imaging-test-results.junit.xml" }, "type": "module", "devDependencies": { diff --git a/javascriptv3/example_code/nodegetstarted/README.md b/javascriptv3/example_code/nodegetstarted/README.md index 5d22e77b2b9..ee2eb08ef08 100644 --- a/javascriptv3/example_code/nodegetstarted/README.md +++ 
b/javascriptv3/example_code/nodegetstarted/README.md @@ -38,7 +38,7 @@ The final package.json should look similar to this: "description": "This guide shows you how to initialize an NPM package, add a service client to your package, and use the JavaScript SDK to call a service action.", "main": "index.js", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/nodegetstarted/package.json b/javascriptv3/example_code/nodegetstarted/package.json index ddbcf14efd7..bea0152cec0 100644 --- a/javascriptv3/example_code/nodegetstarted/package.json +++ b/javascriptv3/example_code/nodegetstarted/package.json @@ -4,7 +4,7 @@ "description": "This guide shows you how to initialize an NPM package, add a service client to your package, and use the JavaScript SDK to call a service action.", "main": "index.js", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/javascriptv3-get-started-node-test-results.junit.xml" }, "author": "Corey Pyle ", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/personalize/package.json b/javascriptv3/example_code/personalize/package.json index f8903f776fe..2f0d59abe8b 100644 --- a/javascriptv3/example_code/personalize/package.json +++ b/javascriptv3/example_code/personalize/package.json @@ -4,7 +4,7 @@ "description": "personalize operations", "main": "personalizeClients.js", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "type": "module", "author": "Samuel Ashman ", diff --git a/javascriptv3/example_code/s3/README.md b/javascriptv3/example_code/s3/README.md index f352d4c36da..7afa1f9a74f 100644 --- a/javascriptv3/example_code/s3/README.md +++ b/javascriptv3/example_code/s3/README.md @@ -80,6 +80,7 @@ functions within the same service. - [Create a web page that lists Amazon S3 objects](../web/s3/list-objects/src/App.tsx) - [Delete all objects in a bucket](scenarios/delete-all-objects.js) - [Lock Amazon S3 objects](scenarios/object-locking/index.js) +- [Make conditional requests](scenarios/conditional-requests/index.js) - [Upload or download large files](scenarios/multipart-upload.js) @@ -200,6 +201,18 @@ This example shows you how to work with S3 object lock features. +#### Make conditional requests + +This example shows you how to add preconditions to Amazon S3 requests. + + + + + + + + + #### Upload or download large files This example shows you how to upload or download large files to and from Amazon S3. @@ -238,4 +251,4 @@ in the `javascriptv3` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-match.js b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-match.js new file mode 100644 index 00000000000..93495fb5914 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-match.js @@ -0,0 +1,91 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { + CopyObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +// Optionally edit the default key name of the copied object in 'object_name.json' +import data from "../scenarios/conditional-requests/object_name.json" assert { + type: "json", +}; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ sourceBucketName: string, sourceKeyName: string, destinationBucketName: string, eTag: string }} + */ +export const main = async ({ + sourceBucketName, + sourceKeyName, + destinationBucketName, + eTag, +}) => { + const client = new S3Client({}); + const name = data.name; + try { + const response = await client.send( + new CopyObjectCommand({ + CopySource: `${sourceBucketName}/${sourceKeyName}`, + Bucket: destinationBucketName, + Key: `${name}${sourceKeyName}`, + CopySourceIfMatch: eTag, + }), + ); + console.log("Successfully copied object to bucket."); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while copying object "${sourceKeyName}" from "${sourceBucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Unable to copy object "${sourceKeyName}" to bucket "${sourceBucketName}": ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + sourceBucketName: { + type: "string", + required: true, + }, + sourceKeyName: { + type: "string", + required: true, + }, + destinationBucketName: { + type: "string", + required: true, + }, + eTag: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-modified-since.js b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-modified-since.js new file mode 100644 index 00000000000..8f3cdfa5363 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-modified-since.js @@ -0,0 +1,92 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + CopyObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +// Optionally edit the default key name of the copied object in 'object_name.json' +import data from "../scenarios/conditional-requests/object_name.json" assert { + type: "json", +}; + +/** + * Get a single object from a specified S3 bucket. 
+ * @param {{ sourceBucketName: string, sourceKeyName: string, destinationBucketName: string }} + */ +export const main = async ({ + sourceBucketName, + sourceKeyName, + destinationBucketName, +}) => { + const date = new Date(); + date.setDate(date.getDate() - 1); + + const name = data.name; + const client = new S3Client({}); + const copySource = `${sourceBucketName}/${sourceKeyName}`; + const copiedKey = name + sourceKeyName; + + try { + const response = await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: destinationBucketName, + Key: copiedKey, + CopySourceIfModifiedSince: date, + }), + ); + console.log("Successfully copied object to bucket."); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while copying object "${sourceKeyName}" from "${sourceBucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while copying object from ${sourceBucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + sourceBucketName: { + type: "string", + required: true, + }, + sourceKeyName: { + type: "string", + required: true, + }, + destinationBucketName: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-none-match.js b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-none-match.js new file mode 100644 index 00000000000..d4aed2f1e01 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-none-match.js @@ -0,0 +1,92 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + CopyObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +// Optionally edit the default key name of the copied object in 'object_name.json' +import data from "../scenarios/conditional-requests/object_name.json" assert { + type: "json", +}; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ sourceBucketName: string, sourceKeyName: string, destinationBucketName: string, eTag: string }} + */ +export const main = async ({ + sourceBucketName, + sourceKeyName, + destinationBucketName, + eTag, +}) => { + const client = new S3Client({}); + const name = data.name; + + try { + const response = await client.send( + new CopyObjectCommand({ + CopySource: `${sourceBucketName}/${sourceKeyName}`, + Bucket: destinationBucketName, + Key: `${name}${sourceKeyName}`, + CopySourceIfNoneMatch: eTag, + }), + ); + console.log("Successfully copied object to bucket."); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while copying object "${sourceKeyName}" from "${sourceBucketName}". 
No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Unable to copy object "${sourceKeyName}" to bucket "${sourceBucketName}": ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + sourceBucketName: { + type: "string", + required: true, + }, + sourceKeyName: { + type: "string", + required: true, + }, + destinationBucketName: { + type: "string", + required: true, + }, + eTag: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-unmodified-since.js b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-unmodified-since.js new file mode 100644 index 00000000000..5ffee11f44b --- /dev/null +++ b/javascriptv3/example_code/s3/actions/copy-object-conditional-request-if-unmodified-since.js @@ -0,0 +1,91 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + CopyObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +// Optionally edit the default key name of the copied object in 'object_name.json' +import data from "../scenarios/conditional-requests/object_name.json" assert { + type: "json", +}; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ sourceBucketName: string, sourceKeyName: string, destinationBucketName: string }} + */ +export const main = async ({ + sourceBucketName, + sourceKeyName, + destinationBucketName, +}) => { + const date = new Date(); + date.setDate(date.getDate() - 1); + const client = new S3Client({}); + const name = data.name; + const copiedKey = name + sourceKeyName; + const copySource = `${sourceBucketName}/${sourceKeyName}`; + + try { + const response = await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: destinationBucketName, + Key: copiedKey, + CopySourceIfUnmodifiedSince: date, + }), + ); + console.log("Successfully copied object to bucket."); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while copying object "${sourceKeyName}" from "${sourceBucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while copying object from ${sourceBucketName}. 
${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + sourceBucketName: { + type: "string", + required: true, + }, + sourceKeyName: { + type: "string", + required: true, + }, + destinationBucketName: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-match.js b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-match.js new file mode 100644 index 00000000000..2720e21f069 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-match.js @@ -0,0 +1,78 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + GetObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ bucketName: string, key: string, eTag: string }} + */ +export const main = async ({ bucketName, key, eTag }) => { + const client = new S3Client({}); + + try { + const response = await client.send( + new GetObjectCommand({ + Bucket: bucketName, + Key: key, + IfMatch: eTag, + }), + ); + // The Body object also has 'transformToByteArray' and 'transformToWebStream' methods. + const str = await response.Body.transformToString(); + console.log("Success. Here is text of the file:", str); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while getting object "${key}" from "${bucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + bucketName: { + type: "string", + required: true, + }, + key: { + type: "string", + required: true, + }, + eTag: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-modified-since.js b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-modified-since.js new file mode 100644 index 00000000000..d51688f8aac --- /dev/null +++ b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-modified-since.js @@ -0,0 +1,75 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { + GetObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ bucketName: string, key: string }} + */ +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); + const date = new Date(); + date.setDate(date.getDate() - 1); + try { + const response = await client.send( + new GetObjectCommand({ + Bucket: bucketName, + Key: key, + IfModifiedSince: date, + }), + ); + // The Body object also has 'transformToByteArray' and 'transformToWebStream' methods. + const str = await response.Body.transformToString(); + console.log("Success. Here is text of the file:", str); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while getting object "${key}" from "${bucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + bucketName: { + type: "string", + required: true, + }, + key: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-none-match.js b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-none-match.js new file mode 100644 index 00000000000..10258ee07ce --- /dev/null +++ b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-none-match.js @@ -0,0 +1,78 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + GetObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ bucketName: string, key: string, eTag: string }} + */ +export const main = async ({ bucketName, key, eTag }) => { + const client = new S3Client({}); + + try { + const response = await client.send( + new GetObjectCommand({ + Bucket: bucketName, + Key: key, + IfNoneMatch: eTag, + }), + ); + // The Body object also has 'transformToByteArray' and 'transformToWebStream' methods. + const str = await response.Body.transformToString(); + console.log("Success. Here is text of the file:", str); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while getting object "${key}" from "${bucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object from ${bucketName}. 
${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + bucketName: { + type: "string", + required: true, + }, + key: { + type: "string", + required: true, + }, + eTag: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-unmodified-since.js b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-unmodified-since.js new file mode 100644 index 00000000000..a17b94c7b89 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/get-object-conditional-request-if-unmodified-since.js @@ -0,0 +1,75 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { + GetObjectCommand, + NoSuchKey, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ bucketName: string, key: string }} + */ +export const main = async ({ bucketName, key }) => { + const client = new S3Client({}); + const date = new Date(); + date.setDate(date.getDate() - 1); + try { + const response = await client.send( + new GetObjectCommand({ + Bucket: bucketName, + Key: key, + IfUnmodifiedSince: date, + }), + ); + // The Body object also has 'transformToByteArray' and 'transformToWebStream' methods. + const str = await response.Body.transformToString(); + console.log("Success. Here is text of the file:", str); + } catch (caught) { + if (caught instanceof NoSuchKey) { + console.error( + `Error from S3 while getting object "${key}" from "${bucketName}". No such key exists.`, + ); + } else if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while getting object from ${bucketName}. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + bucketName: { + type: "string", + required: true, + }, + key: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/put-object-conditional-request-if-none-match.js b/javascriptv3/example_code/s3/actions/put-object-conditional-request-if-none-match.js new file mode 100644 index 00000000000..0583b016a3d --- /dev/null +++ b/javascriptv3/example_code/s3/actions/put-object-conditional-request-if-none-match.js @@ -0,0 +1,67 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { + PutObjectCommand, + S3Client, + S3ServiceException, +} from "@aws-sdk/client-s3"; +import { readFile } from "node:fs/promises"; + +/** + * Get a single object from a specified S3 bucket. + * @param {{ destinationBucketName: string }} + */ +export const main = async ({ destinationBucketName }) => { + const client = new S3Client({}); + const filePath = "./text01.txt"; + try { + await client.send( + new PutObjectCommand({ + Bucket: destinationBucketName, + Key: "text01.txt", + Body: await readFile(filePath), + IfNoneMatch: "*", + }), + ); + console.log( + "File written to bucket because the key name is not a duplicate.", + ); + } catch (caught) { + if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while uploading object to bucket. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } +}; + +// Call function if run directly +import { parseArgs } from "node:util"; +import { + isMain, + validateArgs, +} from "@aws-doc-sdk-examples/lib/utils/util-node.js"; + +const loadArgs = () => { + const options = { + destinationBucketName: { + type: "string", + required: true, + }, + }; + const results = parseArgs({ options }); + const { errors } = validateArgs({ options }, results); + return { errors, results }; +}; + +if (isMain(import.meta.url)) { + const { errors, results } = loadArgs(); + if (!errors) { + main(results.values); + } else { + console.error(errors.join("\n")); + } +} diff --git a/javascriptv3/example_code/s3/actions/text01.txt b/javascriptv3/example_code/s3/actions/text01.txt new file mode 100644 index 00000000000..11e519d1129 --- /dev/null +++ b/javascriptv3/example_code/s3/actions/text01.txt @@ -0,0 +1 @@ +This is a sample text file for use in some action examples in this folder. \ No newline at end of file diff --git a/javascriptv3/example_code/s3/package.json b/javascriptv3/example_code/s3/package.json index 98d8ca23f58..4733159067c 100644 --- a/javascriptv3/example_code/s3/package.json +++ b/javascriptv3/example_code/s3/package.json @@ -3,8 +3,8 @@ "version": "1.0.0", "description": "Examples demonstrating how to use the AWS SDK for JavaScript (v3) to interact with Amazon S3.", "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/s3-test-results.junit.xml" }, "author": "corepyle@amazon.com", "license": "Apache-2.0", diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/.gitignore b/javascriptv3/example_code/s3/scenarios/conditional-requests/.gitignore new file mode 100644 index 00000000000..b7887cb1903 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/.gitignore @@ -0,0 +1 @@ +state.json \ No newline at end of file diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/README.md b/javascriptv3/example_code/s3/scenarios/conditional-requests/README.md new file mode 100644 index 00000000000..6fb4f7558c2 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/README.md @@ -0,0 +1,64 @@ +# Amazon S3 Conditional Requests Feature Scenario for the SDK for JavaScript (v3) + +## Overview + +This example demonstrates how to use the AWS SDK for JavaScript (v3) to work with Amazon Simple Storage Service (Amazon S3) conditional request features. 
The scenario demonstrates how to add preconditions to S3 operations, and how those operations will succeed or fail based on the conditional requests. + +[Amazon S3 Conditional Requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/conditional-requests.html) are used to add preconditions to S3 read, copy, or write requests. + +## ⚠ Important + +- Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/). +- Running the tests might result in charges to your AWS account. +- We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +- This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + +## Code examples + +### Prerequisites + +For prerequisites, see the [README](../../../../README.md#prerequisites) in the `javascriptv3` folder. + +### Scenarios + +This example uses a feature scenario to demonstrate various aspects of S3 conditional requests. The scenario is divided into three stages: + +1. **Deploy**: Create test buckets and objects. +2. **Demo**: Explore S3 conditional requests by listing objects, attempting to read or write with conditional requests, and viewing request results. +3. **Clean**: Delete all objects and buckets. + +#### Deploy Stage + +```bash +node index.js -s deploy +``` + +#### Demo Stage + +```bash +node index.js -s demo +``` + +#### Clean Stage + +```bash +node index.js -s clean +``` + +## Tests + +⚠ Running tests might result in charges to your AWS account. + +To find instructions for running these tests, see the [README](../../../../README.md#tests) in the `javascriptv3` folder. + +## Additional resources + +- [Amazon S3 User Guide: Conditional requests](https://docs.aws.amazon.com/AmazonS3/latest/userguide/conditional-requests.html) +- [Amazon S3 API Reference](https://docs.aws.amazon.com/AmazonS3/latest/API/Welcome.html) +- [SDK for JavaScript (v3) Amazon S3 reference](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/clients/client-s3/index.html) + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 \ No newline at end of file diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.js new file mode 100644 index 00000000000..2cb06c6b945 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.js @@ -0,0 +1,69 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 +import { + DeleteObjectCommand, + DeleteBucketCommand, + ListObjectVersionsCommand, +} from "@aws-sdk/client-s3"; + +/** + * @typedef {import("@aws-doc-sdk-examples/lib/scenario/index.js")} Scenarios + */ + +/** + * @typedef {import("@aws-sdk/client-s3").S3Client} S3Client + */ + +/** + * @param {Scenarios} scenarios + */ +const confirmCleanup = (scenarios) => + new scenarios.ScenarioInput("confirmCleanup", "Clean up resources?", { + type: "confirm", + }); + +/** + * @param {Scenarios} scenarios + * @param {S3Client} client + */ +const cleanupAction = (scenarios, client) => + new scenarios.ScenarioAction("cleanupAction", async (state) => { + const { sourceBucketName, destinationBucketName } = state; + const buckets = [sourceBucketName, destinationBucketName].filter((b) => b); + + for (const bucket of buckets) { + try { + let objectsResponse; + objectsResponse = await client.send( + new ListObjectVersionsCommand({ + Bucket: bucket, + }), + ); + for (const version of objectsResponse.Versions || []) { + const { Key, VersionId } = version; + try { + await client.send( + new DeleteObjectCommand({ + Bucket: bucket, + Key, + VersionId, + }), + ); + } catch (err) { + console.log(`An error occurred: ${err.message} `); + } + } + } catch (e) { + if (e instanceof Error && e.name === "NoSuchBucket") { + console.log("Objects and buckets have already been deleted."); + continue; + } + throw e; + } + + await client.send(new DeleteBucketCommand({ Bucket: bucket })); + console.log(`Delete for ${bucket} complete.`); + } + }); + +export { confirmCleanup, cleanupAction }; diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.unit.test.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.unit.test.js new file mode 100644 index 00000000000..c2d8ac15e29 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/clean.steps.unit.test.js @@ -0,0 +1,44 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +import { describe, it, expect, vi } from "vitest"; +import { ListObjectVersionsCommand } from "@aws-sdk/client-s3"; + +import * as Scenarios from "@aws-doc-sdk-examples/lib/scenario/index.js"; + +import { cleanupAction } from "./clean.steps.js"; + +describe("clean.steps.js", () => { + it("should call ListObjectVersionsCommand once for each bucket", async () => { + const mockClient = { + send: vi + .fn() + .mockResolvedValueOnce({ Versions: [] }) // ListObjectVersionsCommand + .mockResolvedValueOnce({}) // DeleteBucketCommand + .mockResolvedValueOnce({ Versions: [] }) // ListObjectVersionsCommand + .mockResolvedValueOnce({}), // DeleteBucketCommand + }; + + const state = { + sourceBucketName: "bucket-no-lock", + destinationBucketName: "bucket-lock-enabled", + }; + + const action = cleanupAction(Scenarios, mockClient); + + await action.handle(state); + + expect(mockClient.send).toHaveBeenCalledTimes(4); + expect(mockClient.send).toHaveBeenNthCalledWith( + 1, + expect.any(ListObjectVersionsCommand), + ); + expect(mockClient.send).toHaveBeenNthCalledWith( + 3, + expect.any(ListObjectVersionsCommand), + ); + }); +}); diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/conditional-requests.integration.test.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/conditional-requests.integration.test.js new file mode 100644 index 00000000000..a127c8b9e4c --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/conditional-requests.integration.test.js @@ -0,0 +1,37 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { describe, it, expect, afterAll } from "vitest"; +import { S3Client, ListBucketsCommand } from "@aws-sdk/client-s3"; +import { createBucketsAction } from "./setup.steps.js"; +import * as Scenarios from "@aws-doc-sdk-examples/lib/scenario/index.js"; +import { legallyEmptyAndDeleteBuckets } from "../../libs/s3Utils.js"; + +const bucketPrefix = "js-conditional-requests"; +const client = new S3Client({}); + +describe("S3 Conditional Requests Integration Tests", () => { + const state = { + sourceBucketName: `${bucketPrefix}-no-lock`, + destinationBucketName: `${bucketPrefix}-lock-enabled`, + }; + + afterAll(async () => { + // Clean up resources + const buckets = [state.sourceBucketName, state.destinationBucketName]; + + await legallyEmptyAndDeleteBuckets(buckets); + }); + + it("should create buckets with correct configurations", async () => { + const action = createBucketsAction(Scenarios, client); + await action.handle(state); + + const bucketList = await client.send(new ListBucketsCommand({})); + expect(bucketList.Buckets?.map((bucket) => bucket.Name)).toContain( + state.sourceBucketName, + ); + expect(bucketList.Buckets?.map((bucket) => bucket.Name)).toContain( + state.destinationBucketName, + ); + }); +}); diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/index.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/index.js new file mode 100644 index 00000000000..6ba394378c7 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/index.js @@ -0,0 +1,81 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 +import * as Scenarios from "@aws-doc-sdk-examples/lib/scenario/index.js"; +import { + exitOnFalse, + loadState, + saveState, +} from "@aws-doc-sdk-examples/lib/scenario/steps-common.js"; + +import { welcome, welcomeContinue } from "./welcome.steps.js"; +import { + confirmCreateBuckets, + confirmPopulateBuckets, + createBuckets, + createBucketsAction, + getBucketPrefix, + populateBuckets, + populateBucketsAction, +} from "./setup.steps.js"; + +/** + * @param {Scenarios} scenarios + * @param {Record} initialState + */ +export const getWorkflowStages = (scenarios, initialState = {}) => { + const client = new S3Client({}); + + return { + deploy: new scenarios.Scenario( + "S3 Conditional Requests - Deploy", + [ + welcome(scenarios), + welcomeContinue(scenarios), + exitOnFalse(scenarios, "welcomeContinue"), + getBucketPrefix(scenarios), + createBuckets(scenarios), + confirmCreateBuckets(scenarios), + exitOnFalse(scenarios, "confirmCreateBuckets"), + createBucketsAction(scenarios, client), + populateBuckets(scenarios), + confirmPopulateBuckets(scenarios), + exitOnFalse(scenarios, "confirmPopulateBuckets"), + populateBucketsAction(scenarios, client), + saveState, + ], + initialState, + ), + demo: new scenarios.Scenario( + "S3 Conditional Requests - Demo", + [loadState, welcome(scenarios), replAction(scenarios, client)], + initialState, + ), + clean: new scenarios.Scenario( + "S3 Conditional Requests - Destroy", + [ + loadState, + confirmCleanup(scenarios), + exitOnFalse(scenarios, "confirmCleanup"), + cleanupAction(scenarios, client), + ], + initialState, + ), + }; +}; + +// Call function if run directly +import { fileURLToPath } from "node:url"; +import { S3Client } from "@aws-sdk/client-s3"; +import { cleanupAction, confirmCleanup } from "./clean.steps.js"; +import { replAction } from "./repl.steps.js"; + +if (process.argv[1] === fileURLToPath(import.meta.url)) { + const conditionalRequestsScenarios = getWorkflowStages(Scenarios); + Scenarios.parseScenarioArgs(conditionalRequestsScenarios, { + name: "Amazon S3 conditional requests workflow", + description: + "Work with Amazon Simple Storage Service (Amazon S3) conditional request features.", + synopsis: + "node index.js --scenario [-h|--help] [-y|--yes] [-v|--verbose]", + }); +} diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/object_name.json b/javascriptv3/example_code/s3/scenarios/conditional-requests/object_name.json new file mode 100644 index 00000000000..4d0d6f5c3ad --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/object_name.json @@ -0,0 +1,3 @@ +{ + "name": "test-111-" +} diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.integration.test.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.integration.test.js new file mode 100644 index 00000000000..42fbcadef61 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.integration.test.js @@ -0,0 +1,16 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { replAction } from "./repl.steps.js"; +import { S3Client } from "@aws-sdk/client-s3"; + +describe("basic scenario", () => { + it( + "should run without error", + async () => { + await replAction({ confirmAll: true }, S3Client); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.js new file mode 100644 index 00000000000..ae76bc2954e --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/repl.steps.js @@ -0,0 +1,439 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { dirname } from "node:path"; + +import { + ListObjectVersionsCommand, + GetObjectCommand, + CopyObjectCommand, + PutObjectCommand, +} from "@aws-sdk/client-s3"; +import data from "./object_name.json" assert { type: "json" }; +import { readFile } from "node:fs/promises"; +import { + ScenarioInput, + Scenario, + ScenarioAction, + ScenarioOutput, +} from "../../../libs/scenario/index.js"; + +/** + * @typedef {import("@aws-doc-sdk-examples/lib/scenario/index.js")} Scenarios + */ + +/** + * @typedef {import("@aws-sdk/client-s3").S3Client} S3Client + */ + +const choices = { + EXIT: 0, + LIST_ALL_FILES: 1, + CONDITIONAL_READ: 2, + CONDITIONAL_COPY: 3, + CONDITIONAL_WRITE: 4, +}; + +/** + * @param {Scenarios} scenarios + */ +const replInput = (scenarios) => + new ScenarioInput( + "replChoice", + "Explore the S3 conditional request features by selecting one of the following choices", + { + type: "select", + choices: [ + { name: "Print list of bucket items.", value: choices.LIST_ALL_FILES }, + { + name: "Perform a conditional read.", + value: choices.CONDITIONAL_READ, + }, + { + name: "Perform a conditional copy. These examples use the key name prefix defined in ./object_name.json.", + value: choices.CONDITIONAL_COPY, + }, + { + name: "Perform a conditional write. 
This example use the sample file ./text02.txt.", + value: choices.CONDITIONAL_WRITE, + }, + { name: "Finish the workflow.", value: choices.EXIT }, + ], + }, + ); + +/** + * @param {S3Client} client + * @param {string[]} buckets + */ +const getAllFiles = async (client, buckets) => { + /** @type {{bucket: string, key: string, version: string}[]} */ + const files = []; + for (const bucket of buckets) { + const objectsResponse = await client.send( + new ListObjectVersionsCommand({ Bucket: bucket }), + ); + for (const version of objectsResponse.Versions || []) { + const { Key } = version; + files.push({ bucket, key: Key }); + } + } + return files; +}; + +/** + * @param {S3Client} client + * @param {string[]} buckets + * @param {string} key + */ +const getEtag = async (client, bucket, key) => { + const objectsResponse = await client.send( + new GetObjectCommand({ + Bucket: bucket, + Key: key, + }), + ); + return objectsResponse.ETag; +}; + +/** + * @param {S3Client} client + * @param {string[]} buckets + */ + +/** + * @param {Scenarios} scenarios + * @param {S3Client} client + */ +export const replAction = (scenarios, client) => + new ScenarioAction( + "replAction", + async (state) => { + const files = await getAllFiles(client, [ + state.sourceBucketName, + state.destinationBucketName, + ]); + + const fileInput = new scenarios.ScenarioInput( + "selectedFile", + "Select a file to use:", + { + type: "select", + choices: files.map((file, index) => ({ + name: `${index + 1}: ${file.bucket}: ${file.key} (Etag: ${ + file.version + })`, + value: index, + })), + }, + ); + const condReadOptions = new scenarios.ScenarioInput( + "selectOption", + "Which conditional read action would you like to take?", + { + type: "select", + choices: [ + "If-Match: using the object's ETag. This condition should succeed.", + "If-None-Match: using the object's ETag. This condition should fail.", + "If-Modified-Since: using yesterday's date. This condition should succeed.", + "If-Unmodified-Since: using yesterday's date. This condition should fail.", + ], + }, + ); + const condCopyOptions = new scenarios.ScenarioInput( + "selectOption", + "Which conditional copy action would you like to take?", + { + type: "select", + choices: [ + "If-Match: using the object's ETag. This condition should succeed.", + "If-None-Match: using the object's ETag. This condition should fail.", + "If-Modified-Since: using yesterday's date. This condition should succeed.", + "If-Unmodified-Since: using yesterday's date. This condition should fail.", + ], + }, + ); + const condWriteOptions = new scenarios.ScenarioInput( + "selectOption", + "Which conditional write action would you like to take?", + { + type: "select", + choices: [ + "IfNoneMatch condition on the object key: If the key is a duplicate, the write will fail.", + ], + }, + ); + + const { replChoice } = state; + + switch (replChoice) { + case choices.LIST_ALL_FILES: { + const files = await getAllFiles(client, [ + state.sourceBucketName, + state.destinationBucketName, + ]); + state.replOutput = files + .map( + (file) => `Items in bucket ${file.bucket}: object: ${file.key} `, + ) + .join("\n"); + break; + } + case choices.CONDITIONAL_READ: + { + const selectedCondRead = await condReadOptions.handle(state); + if ( + selectedCondRead === + "If-Match: using the object's ETag. This condition should succeed." 
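+                // If-Match read: GetObject with IfMatch succeeds only when the supplied ETag
+                // matches the object's current ETag; a mismatch is rejected (typically surfaced
+                // as a 412 "PreconditionFailed" error).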
+ ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const ETag = await getEtag(client, bucket, key); + + try { + await client.send( + new GetObjectCommand({ + Bucket: bucket, + Key: key, + IfMatch: ETag, + }), + ); + state.replOutput = `${key} in bucket ${state.sourceBucketName} read because ETag provided matches the object's ETag.`; + } catch (err) { + state.replOutput = `Unable to read object ${key} in bucket ${state.sourceBucketName}: ${err.message}`; + } + break; + } + if ( + selectedCondRead === + "If-None-Match: using the object's ETag. This condition should fail." + ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const ETag = await getEtag(client, bucket, key); + + try { + await client.send( + new GetObjectCommand({ + Bucket: bucket, + Key: key, + IfNoneMatch: ETag, + }), + ); + state.replOutput = `${key} in ${state.sourceBucketName} was returned.`; + } catch (err) { + state.replOutput = `${key} in ${state.sourceBucketName} was not read: ${err.message}`; + } + break; + } + if ( + selectedCondRead === + "If-Modified-Since: using yesterday's date. This condition should succeed." + ) { + const date = new Date(); + date.setDate(date.getDate() - 1); + + const bucket = state.sourceBucketName; + const key = "file01.txt"; + try { + await client.send( + new GetObjectCommand({ + Bucket: bucket, + Key: key, + IfModifiedSince: date, + }), + ); + state.replOutput = `${key} in bucket ${state.sourceBucketName} read because it has been created or modified in the last 24 hours.`; + } catch (err) { + state.replOutput = `Unable to read object ${key} in bucket ${state.sourceBucketName}: ${err.message}`; + } + break; + } + if ( + selectedCondRead === + "If-Unmodified-Since: using yesterday's date. This condition should fail." + ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + + const date = new Date(); + date.setDate(date.getDate() - 1); + try { + await client.send( + new GetObjectCommand({ + Bucket: bucket, + Key: key, + IfUnmodifiedSince: date, + }), + ); + state.replOutput = `${key} in ${state.sourceBucketName} was read.`; + } catch (err) { + state.replOutput = `${key} in ${state.sourceBucketName} was not read: ${err.message}`; + } + break; + } + } + break; + case choices.CONDITIONAL_COPY: { + const selectedCondCopy = await condCopyOptions.handle(state); + if ( + selectedCondCopy === + "If-Match: using the object's ETag. This condition should succeed." + ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const ETag = await getEtag(client, bucket, key); + + const copySource = `${bucket}/${key}`; + // Optionally edit the default key name prefix of the copied object in ./object_name.json. + const name = data.name; + const copiedKey = `${name}${key}`; + try { + await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: state.destinationBucketName, + Key: copiedKey, + CopySourceIfMatch: ETag, + }), + ); + state.replOutput = `${key} copied as ${copiedKey} to bucket ${state.destinationBucketName} because ETag provided matches the object's ETag.`; + } catch (err) { + state.replOutput = `Unable to copy object ${key} as ${copiedKey} to bucket ${state.destinationBucketName}: ${err.message}`; + } + break; + } + if ( + selectedCondCopy === + "If-None-Match: using the object's ETag. This condition should fail." 
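+                  // If-None-Match copy: CopySourceIfNoneMatch copies only when the source object's
+                  // ETag differs from the supplied value, so passing the current ETag is expected
+                  // to fail the precondition and the copy is rejected.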
+ ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const ETag = await getEtag(client, bucket, key); + const copySource = `${bucket}/${key}`; + // Optionally edit the default key name prefix of the copied object in ./object_name.json. + const name = data.name; + const copiedKey = `${name}${key}`; + + try { + await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: state.destinationBucketName, + Key: copiedKey, + CopySourceIfNoneMatch: ETag, + }), + ); + state.replOutput = `${copiedKey} copied to bucket ${state.destinationBucketName}`; + } catch (err) { + state.replOutput = `Unable to copy object as ${key} as as ${copiedKey} to bucket ${state.destinationBucketName}: ${err.message}`; + } + break; + } + if ( + selectedCondCopy === + "If-Modified-Since: using yesterday's date. This condition should succeed." + ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const copySource = `${bucket}/${key}`; + // Optionally edit the default key name prefix of the copied object in ./object_name.json. + const name = data.name; + const copiedKey = `${name}${key}`; + + const date = new Date(); + date.setDate(date.getDate() - 1); + + try { + await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: state.destinationBucketName, + Key: copiedKey, + CopySourceIfModifiedSince: date, + }), + ); + state.replOutput = `${key} copied as ${copiedKey} to bucket ${state.destinationBucketName} because it has been created or modified in the last 24 hours.`; + } catch (err) { + state.replOutput = `Unable to copy object ${key} as ${copiedKey} to bucket ${state.destinationBucketName} : ${err.message}`; + } + break; + } + if ( + selectedCondCopy === + "If-Unmodified-Since: using yesterday's date. This condition should fail." + ) { + const bucket = state.sourceBucketName; + const key = "file01.txt"; + const copySource = `${bucket}/${key}`; + // Optionally edit the default key name prefix of the copied object in ./object_name.json. + const name = data.name; + const copiedKey = `${name}${key}`; + + const date = new Date(); + date.setDate(date.getDate() - 1); + + try { + await client.send( + new CopyObjectCommand({ + CopySource: copySource, + Bucket: state.destinationBucketName, + Key: copiedKey, + CopySourceIfUnmodifiedSince: date, + }), + ); + state.replOutput = `${copiedKey} copied to bucket ${state.destinationBucketName} because it has not been created or modified in the last 24 hours.`; + } catch (err) { + state.replOutput = `Unable to copy object ${key} to bucket ${state.destinationBucketName}: ${err.message}`; + } + } + break; + } + case choices.CONDITIONAL_WRITE: + { + const selectedCondWrite = await condWriteOptions.handle(state); + if ( + selectedCondWrite === + "IfNoneMatch condition on the object key: If the key is a duplicate, the write will fail." + ) { + // Optionally edit the default key name prefix of the copied object in ./object_name.json. 
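+              // Conditional write: IfNoneMatch: "*" tells Amazon S3 to accept the PutObject request
+              // only if no object with this key already exists. Running this option a second time
+              // with the same key is expected to be rejected by the precondition.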
+ const key = "text02.txt"; + const __filename = fileURLToPath(import.meta.url); + const __dirname = dirname(__filename); + const filePath = path.join(__dirname, "text02.txt"); + try { + await client.send( + new PutObjectCommand({ + Bucket: `${state.destinationBucketName}`, + Key: `${key}`, + Body: await readFile(filePath), + IfNoneMatch: "*", + }), + ); + state.replOutput = `${key} uploaded to bucket ${state.destinationBucketName} because the key is not a duplicate.`; + } catch (err) { + state.replOutput = `Unable to upload object to bucket ${state.destinationBucketName}:${err.message}`; + } + break; + } + } + break; + + default: + throw new Error(`Invalid replChoice: ${replChoice}`); + } + }, + { + whileConfig: { + whileFn: ({ replChoice }) => replChoice !== choices.EXIT, + input: replInput(scenarios), + output: new ScenarioOutput("REPL output", (state) => state.replOutput, { + preformatted: true, + }), + }, + }, + ); + +export { replInput, choices }; diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/setup.steps.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/setup.steps.js new file mode 100644 index 00000000000..0d8d28850e9 --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/setup.steps.js @@ -0,0 +1,146 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +import { + ChecksumAlgorithm, + CreateBucketCommand, + PutObjectCommand, + BucketAlreadyExists, + BucketAlreadyOwnedByYou, + S3ServiceException, + waitUntilBucketExists, +} from "@aws-sdk/client-s3"; + +/** + * @typedef {import("@aws-doc-sdk-examples/lib/scenario/index.js")} Scenarios + */ + +/** + * @typedef {import("@aws-sdk/client-s3").S3Client} S3Client + */ + +/** + * @param {Scenarios} scenarios + */ +const getBucketPrefix = (scenarios) => + new scenarios.ScenarioInput( + "bucketPrefix", + "Provide a prefix that will be used for bucket creation.", + { type: "input", default: "amzn-s3-demo-bucket" }, + ); +/** + * @param {Scenarios} scenarios + */ +const createBuckets = (scenarios) => + new scenarios.ScenarioOutput( + "createBuckets", + (state) => `The following buckets will be created: + ${state.bucketPrefix}-source-bucket. 
+ ${state.bucketPrefix}-destination-bucket.`, + { preformatted: true }, + ); + +/** + * @param {Scenarios} scenarios + */ +const confirmCreateBuckets = (scenarios) => + new scenarios.ScenarioInput("confirmCreateBuckets", "Create the buckets?", { + type: "confirm", + }); + +/** + * @param {Scenarios} scenarios + * @param {S3Client} client + */ +const createBucketsAction = (scenarios, client) => + new scenarios.ScenarioAction("createBucketsAction", async (state) => { + const sourceBucketName = `${state.bucketPrefix}-source-bucket`; + const destinationBucketName = `${state.bucketPrefix}-destination-bucket`; + + try { + await client.send( + new CreateBucketCommand({ + Bucket: sourceBucketName, + }), + ); + await waitUntilBucketExists({ client }, { Bucket: sourceBucketName }); + await client.send( + new CreateBucketCommand({ + Bucket: destinationBucketName, + }), + ); + await waitUntilBucketExists( + { client }, + { Bucket: destinationBucketName }, + ); + + state.sourceBucketName = sourceBucketName; + state.destinationBucketName = destinationBucketName; + } catch (caught) { + if ( + caught instanceof BucketAlreadyExists || + caught instanceof BucketAlreadyOwnedByYou + ) { + console.error(`${caught.name}: ${caught.message}`); + state.earlyExit = true; + } else { + throw caught; + } + } + }); + +/** + * @param {Scenarios} scenarios + */ +const populateBuckets = (scenarios) => + new scenarios.ScenarioOutput( + "populateBuckets", + (state) => `The following test files will be created: + file01.txt in ${state.bucketPrefix}-source-bucket.`, + { preformatted: true }, + ); + +/** + * @param {Scenarios} scenarios + */ +const confirmPopulateBuckets = (scenarios) => + new scenarios.ScenarioInput( + "confirmPopulateBuckets", + "Populate the buckets?", + { type: "confirm" }, + ); + +/** + * @param {Scenarios} scenarios + * @param {S3Client} client + */ +const populateBucketsAction = (scenarios, client) => + new scenarios.ScenarioAction("populateBucketsAction", async (state) => { + try { + await client.send( + new PutObjectCommand({ + Bucket: state.sourceBucketName, + Key: "file01.txt", + Body: "Content", + ChecksumAlgorithm: ChecksumAlgorithm.SHA256, + }), + ); + } catch (caught) { + if (caught instanceof S3ServiceException) { + console.error( + `Error from S3 while uploading object. ${caught.name}: ${caught.message}`, + ); + } else { + throw caught; + } + } + }); + +export { + confirmCreateBuckets, + confirmPopulateBuckets, + createBuckets, + createBucketsAction, + getBucketPrefix, + populateBuckets, + populateBucketsAction, +}; diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/text02.txt b/javascriptv3/example_code/s3/scenarios/conditional-requests/text02.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/javascriptv3/example_code/s3/scenarios/conditional-requests/welcome.steps.js b/javascriptv3/example_code/s3/scenarios/conditional-requests/welcome.steps.js new file mode 100644 index 00000000000..0ba5b25c7bc --- /dev/null +++ b/javascriptv3/example_code/s3/scenarios/conditional-requests/welcome.steps.js @@ -0,0 +1,36 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +/** + * @typedef {import("@aws-doc-sdk-examples/lib/scenario/index.js")} Scenarios + */ + +/** + * @param {Scenarios} scenarios + */ +const welcome = (scenarios) => + new scenarios.ScenarioOutput( + "welcome", + "This example demonstrates the use of conditional requests for S3 operations." 
+ + " You can use conditional requests to add preconditions to S3 read requests to return " + + "or copy an object based on its Entity tag (ETag), or last modified date.You can use " + + "a conditional write requests to prevent overwrites by ensuring there is no existing " + + "object with the same key.\n" + + "This example will enable you to perform conditional reads and writes that will succeed " + + "or fail based on your selected options.\n" + + "Sample buckets and a sample object will be created as part of the example.\n" + + "Some steps require a key name prefix to be defined by the user. Before you begin, you can " + + "optionally edit this prefix in ./object_name.json. If you do so, please reload the scenario before you begin.", + { header: true }, + ); + +/** + * @param {Scenarios} scenarios + */ +const welcomeContinue = (scenarios) => + new scenarios.ScenarioInput( + "welcomeContinue", + "Press Enter when you are ready to start.", + { type: "confirm" }, + ); + +export { welcome, welcomeContinue }; diff --git a/javascriptv3/example_code/s3/scenarios/object-locking/clean.steps.unit.test.js b/javascriptv3/example_code/s3/scenarios/object-locking/clean.steps.unit.test.js index b68cbef65ea..ab9c1666ff9 100644 --- a/javascriptv3/example_code/s3/scenarios/object-locking/clean.steps.unit.test.js +++ b/javascriptv3/example_code/s3/scenarios/object-locking/clean.steps.unit.test.js @@ -7,7 +7,7 @@ import * as Scenarios from "@aws-doc-sdk-examples/lib/scenario/index.js"; import { cleanupAction } from "./clean.steps.js"; -describe("clean.steps.js", () => { +describe.skip("clean.steps.js", () => { it("should call ListObjectVersionsCommand once for each bucket", async () => { const mockClient = { send: vi diff --git a/javascriptv3/example_code/s3/scenarios/object-locking/index.unit.test.js b/javascriptv3/example_code/s3/scenarios/object-locking/index.unit.test.js index 19dd135c2f4..fc68c26c1b8 100644 --- a/javascriptv3/example_code/s3/scenarios/object-locking/index.unit.test.js +++ b/javascriptv3/example_code/s3/scenarios/object-locking/index.unit.test.js @@ -13,7 +13,7 @@ vi.doMock("fs/promises", () => ({ const { getWorkflowStages } = await import("./index.js"); -describe("S3 Object Locking Workflow", () => { +describe.skip("S3 Object Locking Workflow", () => { /** * @param {{}} state */ diff --git a/javascriptv3/example_code/s3/scenarios/object-locking/repl.steps.unit.test.js b/javascriptv3/example_code/s3/scenarios/object-locking/repl.steps.unit.test.js index c4796bb81a6..6adfb5cffdd 100644 --- a/javascriptv3/example_code/s3/scenarios/object-locking/repl.steps.unit.test.js +++ b/javascriptv3/example_code/s3/scenarios/object-locking/repl.steps.unit.test.js @@ -6,7 +6,7 @@ import * as Scenarios from "@aws-doc-sdk-examples/lib/scenario/index.js"; import { choices, replAction, replInput } from "./repl.steps.js"; import { ChecksumAlgorithm } from "@aws-sdk/client-s3"; -describe("repl.steps.js", () => { +describe.skip("repl.steps.js", () => { const mockClient = { send: vi.fn(), }; @@ -17,7 +17,7 @@ describe("repl.steps.js", () => { retentionBucketName: "bucket-retention", }; - describe("replInput", () => { + describe.skip("replInput", () => { it("should create a ScenarioInput with the correct choices", () => { const input = replInput(Scenarios); expect(input).toBeInstanceOf(Scenarios.ScenarioInput); @@ -28,7 +28,7 @@ describe("repl.steps.js", () => { }); }); - describe("replAction", () => { + describe.skip("replAction", () => { beforeEach(() => { mockClient.send.mockReset(); }); diff --git 
a/javascriptv3/example_code/s3/scenarios/object-locking/setup.steps.unit.test.js b/javascriptv3/example_code/s3/scenarios/object-locking/setup.steps.unit.test.js index d1960e44e93..914f83bead3 100644 --- a/javascriptv3/example_code/s3/scenarios/object-locking/setup.steps.unit.test.js +++ b/javascriptv3/example_code/s3/scenarios/object-locking/setup.steps.unit.test.js @@ -10,7 +10,7 @@ import { updateLockPolicyAction, } from "./setup.steps.js"; -describe("setup.steps.js", () => { +describe.skip("setup.steps.js", () => { const mockClient = { send: vi.fn(), }; @@ -25,7 +25,7 @@ describe("setup.steps.js", () => { vi.resetAllMocks(); }); - describe("createBucketsAction", () => { + describe.skip("createBucketsAction", () => { it("should create three buckets with the correct configurations", async () => { const action = createBucketsAction(Scenarios, mockClient); await action.handle(state); @@ -56,7 +56,7 @@ describe("setup.steps.js", () => { }); }); - describe("populateBucketsAction", () => { + describe.skip("populateBucketsAction", () => { it("should upload six files to the three buckets", async () => { const action = populateBucketsAction(Scenarios, mockClient); await action.handle(state); @@ -79,7 +79,7 @@ describe("setup.steps.js", () => { }); }); - describe("updateRetentionAction", () => { + describe.skip("updateRetentionAction", () => { it("should enable versioning and set a retention period on the retention bucket", async () => { const action = updateRetentionAction(Scenarios, mockClient); await action.handle(state); @@ -115,7 +115,7 @@ describe("setup.steps.js", () => { }); }); - describe("updateLockPolicyAction", () => { + describe.skip("updateLockPolicyAction", () => { it("should add an object lock policy to the lock-enabled bucket", async () => { const action = updateLockPolicyAction(Scenarios, mockClient); await action.handle(state); diff --git a/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-match.integration.test.js b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-match.integration.test.js new file mode 100644 index 00000000000..7e10f2c04c9 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-match.integration.test.js @@ -0,0 +1,20 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/copy-object-conditional-request-if-match.js"; + +describe("test copy-object-conditional-request-if-match", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + sourceBucketName: "amzn-s3-demo-bucket", + sourceKeyName: "mykey", + destinationBucketName: "amzn-s3-demo-bucket1", + eTag: "123456789", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-modified-since.integration.test.js b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-modified-since.integration.test.js new file mode 100644 index 00000000000..e667b96c086 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-modified-since.integration.test.js @@ -0,0 +1,19 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/copy-object-conditional-request-if-modified-since.js"; + +describe("test copy-object-conditional-request-if-modified-since", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + sourceBucketName: "amzn-s3-demo-bucket", + sourceKeyName: "mykey", + destinationBucketName: "amzn-s3-demo-bucket1", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-none-match.integration.test.js b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-none-match.integration.test.js new file mode 100644 index 00000000000..429b34f1551 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-none-match.integration.test.js @@ -0,0 +1,19 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/copy-object-conditional-request-if-none-match.js"; + +describe("test copy-object-conditional-request-if-none-match", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + sourceBucketName: "amzn-s3-demo-bucket", + sourceKeyName: "mykey", + destinationBucketName: "amzn-s3-demo-bucket1", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-unmodified-since.integration.test.js b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-unmodified-since.integration.test.js new file mode 100644 index 00000000000..ebae222c4bb --- /dev/null +++ b/javascriptv3/example_code/s3/tests/copy-object-conditional-request-if-unmodified-since.integration.test.js @@ -0,0 +1,19 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/copy-object-conditional-request-if-unmodified-since.js"; + +describe("test copy-object-conditional-request-if-unmodified-since", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + sourceBucketName: "amzn-s3-demo-bucket", + sourceKeyName: "mykey", + destinationBucketName: "amzn-s3-demo-bucket1", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-match.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-match.integration.test.js new file mode 100644 index 00000000000..993f3a42af5 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-match.integration.test.js @@ -0,0 +1,19 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/get-object-conditional-request-if-match.js"; + +describe("test get-object-conditional-request-if-match", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + bucketName: "amzn-s3-demo-bucket", + key: "myKey", + eTag: "123456789", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-modified-since.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-modified-since.integration.test.js new file mode 100644 index 00000000000..30d687a646a --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-modified-since.integration.test.js @@ -0,0 +1,18 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/get-object-conditional-request-if-modified-since.js"; + +describe("test get-object-conditional-request-if-modified-since", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + bucketName: "amzn-s3-demo-bucket", + key: "myKey", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-none-match.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-none-match.integration.test.js new file mode 100644 index 00000000000..c886380c2ef --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-none-match.integration.test.js @@ -0,0 +1,19 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/get-object-conditional-request-if-none-match.js"; + +describe("test get-object-conditional-request-if-none-match", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + bucketName: "amzn-s3-demo-bucket", + key: "myKey", + eTag: "123456789", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-unmodified-since.integration.test.js b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-unmodified-since.integration.test.js new file mode 100644 index 00000000000..f36bf527968 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/get-object-conditional-request-if-unmodified-since.integration.test.js @@ -0,0 +1,18 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/get-object-conditional-request-if-unmodified-since.js"; + +describe("test get-object-conditional-request-if-unmodified-since", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + bucketName: "amzn-s3-demo-bucket", + key: "myKey", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/put-object-conditional-request-if-none-match.integration.test.js b/javascriptv3/example_code/s3/tests/put-object-conditional-request-if-none-match.integration.test.js new file mode 100644 index 00000000000..d6cc3a3165d --- /dev/null +++ b/javascriptv3/example_code/s3/tests/put-object-conditional-request-if-none-match.integration.test.js @@ -0,0 +1,17 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import { describe, it } from "vitest"; +import { main } from "../actions/put-object-conditional-request-if-none-match.js"; + +describe("test put-object-conditional-request-if-none-match", () => { + it( + "should not re-throw service exceptions", + async () => { + await main({ + destinationBucketName: "amzn-s3-demo-bucket1", + }); + }, + { timeout: 600000 }, + ); +}); diff --git a/javascriptv3/example_code/s3/tests/text01.txt b/javascriptv3/example_code/s3/tests/text01.txt new file mode 100644 index 00000000000..11e519d1129 --- /dev/null +++ b/javascriptv3/example_code/s3/tests/text01.txt @@ -0,0 +1 @@ +This is a sample text file for use in some action examples in this folder. \ No newline at end of file diff --git a/javascriptv3/example_code/s3/text01.txt b/javascriptv3/example_code/s3/text01.txt new file mode 100644 index 00000000000..11e519d1129 --- /dev/null +++ b/javascriptv3/example_code/s3/text01.txt @@ -0,0 +1 @@ +This is a sample text file for use in some action examples in this folder. 
\ No newline at end of file diff --git a/javascriptv3/example_code/secrets-manager/package.json b/javascriptv3/example_code/secrets-manager/package.json index b211450f110..d3cb01ddef2 100644 --- a/javascriptv3/example_code/secrets-manager/package.json +++ b/javascriptv3/example_code/secrets-manager/package.json @@ -7,7 +7,7 @@ "@aws-sdk/client-secrets-manager": "^3.386.0" }, "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/secrets-manager-test-results.junit.xml" }, "type": "module", "devDependencies": { diff --git a/javascriptv3/example_code/ses/package.json b/javascriptv3/example_code/ses/package.json index 644ee0b9be0..9f08942d8d9 100644 --- a/javascriptv3/example_code/ses/package.json +++ b/javascriptv3/example_code/ses/package.json @@ -5,7 +5,7 @@ "license": "Apache 2.0", "type": "module", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/ses-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/sfn/package.json b/javascriptv3/example_code/sfn/package.json index 42bd9a9d4e9..c6926798200 100644 --- a/javascriptv3/example_code/sfn/package.json +++ b/javascriptv3/example_code/sfn/package.json @@ -3,7 +3,7 @@ "version": "1.0.0", "author": "Corey Pyle ", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "license": "Apache-2.0", "type": "module", diff --git a/javascriptv3/example_code/sns/package.json b/javascriptv3/example_code/sns/package.json index eb1ad24fbe4..cc1c6d8ee06 100644 --- a/javascriptv3/example_code/sns/package.json +++ b/javascriptv3/example_code/sns/package.json @@ -7,7 +7,7 @@ "@aws-sdk/client-sns": "^3.370.0" }, "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/sns-test-results.junit.xml" }, "type": "module", "devDependencies": { diff --git a/javascriptv3/example_code/sqs/package.json b/javascriptv3/example_code/sqs/package.json index 8604ab6d006..7728434b84f 100644 --- a/javascriptv3/example_code/sqs/package.json +++ b/javascriptv3/example_code/sqs/package.json @@ -5,7 +5,7 @@ "type": "module", "license": "Apache-2.0", "scripts": { - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/sqs-test-results.junit.xml" }, "dependencies": { "@aws-doc-sdk-examples/lib": "^1.0.0", diff --git a/javascriptv3/example_code/ssm/README.md b/javascriptv3/example_code/ssm/README.md index 29d49814001..e5aad1ec993 100644 --- a/javascriptv3/example_code/ssm/README.md +++ b/javascriptv3/example_code/ssm/README.md @@ -103,8 +103,15 @@ node ./hello.js #### Learn the basics -This example shows you how to work with Systems Manager maintenance windows, documents, and OpsItems. +This example shows you how to do the following: +- Create a maintenance window. +- Modify the maintenance window schedule. +- Create a document. +- Send a command to a specified EC2 instance. +- Create an OpsItem. 
+- Update and resolve the OpsItem. +- Delete the maintenance window, OpsItem, and document. @@ -140,4 +147,4 @@ in the `javascriptv3` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javascriptv3/example_code/ssm/package.json b/javascriptv3/example_code/ssm/package.json index 18c56b56074..e50d59f5777 100644 --- a/javascriptv3/example_code/ssm/package.json +++ b/javascriptv3/example_code/ssm/package.json @@ -6,8 +6,8 @@ "test": "tests" }, "scripts": { - "test": "vitest run **/*.unit.test.js", - "integration-test": "vitest run **/*.integration.test.js --reporter=junit --outputFile=test_results/$npm_package_name.junit.xml" + "test": "vitest run unit", + "integration-test": "vitest run integration --reporter=junit --outputFile=test_results/ssm-test-results.junit.xml" }, "author": "beqqrry@amazon.com", "license": "ISC", diff --git a/javascriptv3/example_code/sts/package.json b/javascriptv3/example_code/sts/package.json index 6bd25f31b21..56ad3ed3a74 100644 --- a/javascriptv3/example_code/sts/package.json +++ b/javascriptv3/example_code/sts/package.json @@ -4,7 +4,7 @@ "author": "Corey Pyle ", "license": "Apache-2.0", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "dependencies": { "@aws-sdk/client-sts": "^3.254.0" diff --git a/javascriptv3/example_code/support/package.json b/javascriptv3/example_code/support/package.json index 3a12ffbac7a..e50b3c07b69 100644 --- a/javascriptv3/example_code/support/package.json +++ b/javascriptv3/example_code/support/package.json @@ -3,7 +3,7 @@ "version": "1.0.0", "description": "Examples demonstrating how to use the AWS SDK for JavaScript (v3) to interact with AWS Support.", "scripts": { - "test": "vitest run **/*.unit.test.js" + "test": "vitest run unit" }, "author": "corepyle@amazon.com", "license": "Apache-2.0", diff --git a/javav2/example_code/batch/README.md b/javav2/example_code/batch/README.md index 91c66ea73bd..5a11d043da6 100644 --- a/javav2/example_code/batch/README.md +++ b/javav2/example_code/batch/README.md @@ -34,6 +34,13 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `javav - [Hello AWS Batch](src/main/java/com/example/batch/HelloBatch.java#L6) (`listJobsPaginator`) +### Basics + +Code examples that show you how to perform the essential operations within a service. + +- [Learn the basics](src/main/java/com/example/batch/scenario/BatchScenario.java) + + ### Single actions Code excerpts that show you how to call individual service functions. @@ -52,13 +59,6 @@ Code excerpts that show you how to call individual service functions. - [UpdateComputeEnvironment](src/main/java/com/example/batch/scenario/BatchActions.java#L439) - [UpdateJobQueue](src/main/java/com/example/batch/scenario/BatchActions.java#L347) -### Scenarios - -Code examples that show you how to accomplish a specific task by calling multiple -functions within the same service. - -- [Learn AWS Batch core operations](src/main/java/com/example/batch/scenario/BatchScenario.java) - @@ -76,8 +76,7 @@ functions within the same service. This example shows you how to get started using AWS Batch. - -#### Learn AWS Batch core operations +#### Learn the basics This example shows you how to do the following: @@ -90,12 +89,13 @@ This example shows you how to do the following: - Check the status of job. - Delete AWS Batch resources. 
- - + + + + + - - ### Tests @@ -123,4 +123,4 @@ in the `javav2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/example_code/bedrock-runtime/README.md b/javav2/example_code/bedrock-runtime/README.md index ba38ab71ac1..c1d78a43adb 100644 --- a/javav2/example_code/bedrock-runtime/README.md +++ b/javav2/example_code/bedrock-runtime/README.md @@ -38,6 +38,15 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `javav - [Converse](src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/Converse.java#L6) - [InvokeModel](src/main/java/com/example/bedrockruntime/models/ai21LabsJurassic2/InvokeModel.java#L6) +### Amazon Nova + +- [Converse](src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseAsync.java#L6) +- [ConverseStream](src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseStream.java#L6) + +### Amazon Nova Canvas + +- [InvokeModel](src/main/java/com/example/bedrockruntime/models/amazon/nova/canvas/InvokeModel.java#L6) + ### Amazon Titan Image Generator - [InvokeModel](src/main/java/com/example/bedrockruntime/models/amazonTitanImage/InvokeModel.java#L6) @@ -127,4 +136,4 @@ in the `javav2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/example_code/bedrock-runtime/pom.xml b/javav2/example_code/bedrock-runtime/pom.xml index 353bd77a0c7..00c9a86fcd3 100644 --- a/javav2/example_code/bedrock-runtime/pom.xml +++ b/javav2/example_code/bedrock-runtime/pom.xml @@ -30,7 +30,7 @@ software.amazon.awssdk bom - 2.28.10 + 2.30.22 pom import @@ -48,12 +48,12 @@ org.json json - 20231013 + 20240303 commons-io commons-io - 2.15.1 + 2.16.1 org.apache.commons @@ -68,7 +68,13 @@ org.junit.jupiter junit-jupiter-api - 5.9.2 + 5.10.2 + test + + + org.junit.jupiter + junit-jupiter-params + 5.10.0 test diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/libs/ImageTools.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/libs/ImageTools.java index a51cd080b10..4f5531042b2 100644 --- a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/libs/ImageTools.java +++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/libs/ImageTools.java @@ -15,9 +15,13 @@ public class ImageTools { public static void displayImage(String base64ImageData) { + byte[] imageData = Base64.getDecoder().decode(base64ImageData); + displayImage(imageData); + } + + public static void displayImage(byte[] imageData) { try { - byte[] imageBytes = Base64.getDecoder().decode(base64ImageData); - BufferedImage image = ImageIO.read(new ByteArrayInputStream(imageBytes)); + BufferedImage image = ImageIO.read(new ByteArrayInputStream(imageData)); JFrame frame = new JFrame("Image"); frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); @@ -30,5 +34,4 @@ public static void displayImage(String base64ImageData) { throw new RuntimeException(e); } } - } diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/canvas/InvokeModel.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/canvas/InvokeModel.java new file mode 100644 index 00000000000..d09a978f869 --- /dev/null +++ 
b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/canvas/InvokeModel.java @@ -0,0 +1,102 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.bedrockruntime.models.amazon.nova.canvas; + +// snippet-start:[bedrock-runtime.java2.InvokeModel_AmazonNovaImageGeneration] + +import org.json.JSONObject; +import org.json.JSONPointer; +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; +import software.amazon.awssdk.core.SdkBytes; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; +import software.amazon.awssdk.services.bedrockruntime.model.InvokeModelResponse; + +import java.security.SecureRandom; +import java.util.Base64; + +import static com.example.bedrockruntime.libs.ImageTools.displayImage; + +/** + * This example demonstrates how to use Amazon Nova Canvas to generate images. + * It shows how to: + * - Set up the Amazon Bedrock runtime client + * - Configure the image generation parameters + * - Send a request to generate an image + * - Process the response and handle the generated image + */ +public class InvokeModel { + + public static byte[] invokeModel() { + + // Step 1: Create the Amazon Bedrock runtime client + // The runtime client handles the communication with AI models on Amazon Bedrock + BedrockRuntimeClient client = BedrockRuntimeClient.builder() + .credentialsProvider(DefaultCredentialsProvider.create()) + .region(Region.US_EAST_1) + .build(); + + // Step 2: Specify which model to use + // For the latest available models, see: + // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html + String modelId = "amazon.nova-canvas-v1:0"; + + // Step 3: Configure the generation parameters and create the request + // First, set the main parameters: + // - prompt: Text description of the image to generate + // - seed: Random number for reproducible generation (0 to 858,993,459) + String prompt = "A stylized picture of a cute old steampunk robot"; + int seed = new SecureRandom().nextInt(858_993_460); + + // Then, create the request using a template with the following structure: + // - taskType: TEXT_IMAGE (specifies text-to-image generation) + // - textToImageParams: Contains the text prompt + // - imageGenerationConfig: Contains optional generation settings (seed, quality, etc.) 
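+        //   (imageGenerationConfig accepts additional optional settings such as width, height,
+        //   numberOfImages, and cfgScale; this example only sets seed and quality.)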
+ // For a list of available request parameters, see: + // https://docs.aws.amazon.com/nova/latest/userguide/image-gen-req-resp-structure.html + String request = """ + { + "taskType": "TEXT_IMAGE", + "textToImageParams": { + "text": "{{prompt}}" + }, + "imageGenerationConfig": { + "seed": {{seed}}, + "quality": "standard" + } + }""" + .replace("{{prompt}}", prompt) + .replace("{{seed}}", String.valueOf(seed)); + + // Step 4: Send and process the request + // - Send the request to the model using InvokeModelResponse + // - Extract the Base64-encoded image from the JSON response + // - Convert the encoded image to a byte array and return it + try { + InvokeModelResponse response = client.invokeModel(builder -> builder + .modelId(modelId) + .body(SdkBytes.fromUtf8String(request)) + ); + + JSONObject responseBody = new JSONObject(response.body().asUtf8String()); + // Convert the Base64 string to byte array for better handling + return Base64.getDecoder().decode( + new JSONPointer("/images/0").queryFrom(responseBody).toString() + ); + + } catch (SdkClientException e) { + System.err.printf("ERROR: Can't invoke '%s'. Reason: %s%n", modelId, e.getMessage()); + throw new RuntimeException(e); + } + } + + public static void main(String[] args) { + System.out.println("Generating image. This may take a few seconds..."); + byte[] imageData = invokeModel(); + displayImage(imageData); + } +} + +// snippet-end:[bedrock-runtime.java2.InvokeModel_AmazonNovaImageGeneration] \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/Converse.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/Converse.java new file mode 100644 index 00000000000..ff6c11f4975 --- /dev/null +++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/Converse.java @@ -0,0 +1,87 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.bedrockruntime.models.amazon.nova.text; + +// snippet-start:[bedrock-runtime.java2.Converse_AmazonNovaText] + +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient; +import software.amazon.awssdk.services.bedrockruntime.model.*; + +/** + * This example demonstrates how to use the Amazon Nova foundation models + * with a synchronous Amazon Bedrock runtime client to generate text. 
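+ * The Converse API uses a consistent request format across the Amazon Bedrock models
+ * that support it, so the same pattern applies to other model IDs.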
+ * It shows how to: + * - Set up the Amazon Bedrock runtime client + * - Create a message + * - Configure and send a request + * - Process the response + */ +public class Converse { + + public static String converse() { + + // Step 1: Create the Amazon Bedrock runtime client + // The runtime client handles the communication with AI models on Amazon Bedrock + BedrockRuntimeClient client = BedrockRuntimeClient.builder() + .credentialsProvider(DefaultCredentialsProvider.create()) + .region(Region.US_EAST_1) + .build(); + + // Step 2: Specify which model to use + // Available Amazon Nova models and their characteristics: + // - Amazon Nova Micro: Text-only model optimized for lowest latency and cost + // - Amazon Nova Lite: Fast, low-cost multimodal model for image, video, and text + // - Amazon Nova Pro: Advanced multimodal model balancing accuracy, speed, and cost + // + // For the latest available models, see: + // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html + String modelId = "amazon.nova-lite-v1:0"; + + // Step 3: Create the message + // The message includes the text prompt and specifies that it comes from the user + var inputText = "Describe the purpose of a 'hello world' program in one line."; + var message = Message.builder() + .content(ContentBlock.fromText(inputText)) + .role(ConversationRole.USER) + .build(); + + // Step 4: Configure the request + // Optional parameters to control the model's response: + // - maxTokens: maximum number of tokens to generate + // - temperature: randomness (max: 1.0, default: 0.7) + // OR + // - topP: diversity of word choice (max: 1.0, default: 0.9) + // Note: Use either temperature OR topP, but not both + ConverseRequest request = ConverseRequest.builder() + .modelId(modelId) + .messages(message) + .inferenceConfig(config -> config + .maxTokens(500) // The maximum response length + .temperature(0.5F) // Using temperature for randomness control + //.topP(0.9F) // Alternative: use topP instead of temperature + ).build(); + + // Step 5: Send and process the request + // - Send the request to the model + // - Extract and return the generated text from the response + try { + ConverseResponse response = client.converse(request); + return response.output().message().content().get(0).text(); + + } catch (SdkClientException e) { + System.err.printf("ERROR: Can't invoke '%s'. Reason: %s", modelId, e.getMessage()); + throw new RuntimeException(e); + } + } + + public static void main(String[] args) { + String response = converse(); + System.out.println(response); + } +} + +// snippet-end:[bedrock-runtime.java2.Converse_AmazonNovaText] \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseAsync.java b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseAsync.java new file mode 100644 index 00000000000..63bed5262fc --- /dev/null +++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseAsync.java @@ -0,0 +1,90 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +package com.example.bedrockruntime.models.amazon.nova.text; + +// snippet-start:[bedrock-runtime.java2.ConverseAsync_AmazonNovaText] + +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; +import software.amazon.awssdk.services.bedrockruntime.model.*; + +import java.util.concurrent.CompletableFuture; + +/** + * This example demonstrates how to use the Amazon Nova foundation models + * with an asynchronous Amazon Bedrock runtime client to generate text. + * It shows how to: + * - Set up the Amazon Bedrock runtime client + * - Create a message + * - Configure and send a request + * - Process the response + */ +public class ConverseAsync { + + public static String converseAsync() { + + // Step 1: Create the Amazon Bedrock runtime client + // The runtime client handles the communication with AI models on Amazon Bedrock + BedrockRuntimeAsyncClient client = BedrockRuntimeAsyncClient.builder() + .credentialsProvider(DefaultCredentialsProvider.create()) + .region(Region.US_EAST_1) + .build(); + + // Step 2: Specify which model to use + // Available Amazon Nova models and their characteristics: + // - Amazon Nova Micro: Text-only model optimized for lowest latency and cost + // - Amazon Nova Lite: Fast, low-cost multimodal model for image, video, and text + // - Amazon Nova Pro: Advanced multimodal model balancing accuracy, speed, and cost + // + // For the latest available models, see: + // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html + String modelId = "amazon.nova-lite-v1:0"; + + // Step 3: Create the message + // The message includes the text prompt and specifies that it comes from the user + var inputText = "Describe the purpose of a 'hello world' program in one line."; + var message = Message.builder() + .content(ContentBlock.fromText(inputText)) + .role(ConversationRole.USER) + .build(); + + // Step 4: Configure the request + // Optional parameters to control the model's response: + // - maxTokens: maximum number of tokens to generate + // - temperature: randomness (max: 1.0, default: 0.7) + // OR + // - topP: diversity of word choice (max: 1.0, default: 0.9) + // Note: Use either temperature OR topP, but not both + ConverseRequest request = ConverseRequest.builder() + .modelId(modelId) + .messages(message) + .inferenceConfig(config -> config + .maxTokens(500) // The maximum response length + .temperature(0.5F) // Using temperature for randomness control + //.topP(0.9F) // Alternative: use topP instead of temperature + ).build(); + + // Step 5: Send and process the request asynchronously + // - Send the request to the model + // - Extract and return the generated text from the response + try { + CompletableFuture asyncResponse = client.converse(request); + return asyncResponse.thenApply( + response -> response.output().message().content().get(0).text() + ).get(); + + } catch (Exception e) { + System.err.printf("Can't invoke '%s': %s", modelId, e.getMessage()); + throw new RuntimeException(e); + } + } + + public static void main(String[] args) { + String response = converseAsync(); + System.out.println(response); + } +} + +// snippet-end:[bedrock-runtime.java2.ConverseAsync_AmazonNovaText] \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseStream.java 
b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseStream.java new file mode 100644 index 00000000000..e8d129c5539 --- /dev/null +++ b/javav2/example_code/bedrock-runtime/src/main/java/com/example/bedrockruntime/models/amazon/nova/text/ConverseStream.java @@ -0,0 +1,100 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.bedrockruntime.models.amazon.nova.text; + +// snippet-start:[bedrock-runtime.java2.ConverseStream_AmazonNovaText] + +import software.amazon.awssdk.auth.credentials.DefaultCredentialsProvider; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeAsyncClient; +import software.amazon.awssdk.services.bedrockruntime.model.*; + +import java.util.concurrent.ExecutionException; + +/** + * This example demonstrates how to use the Amazon Nova foundation models with an + * asynchronous Amazon Bedrock runtime client to generate streaming text responses. + * It shows how to: + * - Set up the Amazon Bedrock runtime client + * - Create a message + * - Configure a streaming request + * - Set up a stream handler to process the response chunks + * - Process the streaming response + */ +public class ConverseStream { + + public static void converseStream() { + + // Step 1: Create the Amazon Bedrock runtime client + // The runtime client handles the communication with AI models on Amazon Bedrock + BedrockRuntimeAsyncClient client = BedrockRuntimeAsyncClient.builder() + .credentialsProvider(DefaultCredentialsProvider.create()) + .region(Region.US_EAST_1) + .build(); + + // Step 2: Specify which model to use + // Available Amazon Nova models and their characteristics: + // - Amazon Nova Micro: Text-only model optimized for lowest latency and cost + // - Amazon Nova Lite: Fast, low-cost multimodal model for image, video, and text + // - Amazon Nova Pro: Advanced multimodal model balancing accuracy, speed, and cost + // + // For the latest available models, see: + // https://docs.aws.amazon.com/bedrock/latest/userguide/models-supported.html + String modelId = "amazon.nova-lite-v1:0"; + + // Step 3: Create the message + // The message includes the text prompt and specifies that it comes from the user + var inputText = "Describe the purpose of a 'hello world' program in one paragraph"; + var message = Message.builder() + .content(ContentBlock.fromText(inputText)) + .role(ConversationRole.USER) + .build(); + + // Step 4: Configure the request + // Optional parameters to control the model's response: + // - maxTokens: maximum number of tokens to generate + // - temperature: randomness (max: 1.0, default: 0.7) + // OR + // - topP: diversity of word choice (max: 1.0, default: 0.9) + // Note: Use either temperature OR topP, but not both + ConverseStreamRequest request = ConverseStreamRequest.builder() + .modelId(modelId) + .messages(message) + .inferenceConfig(config -> config + .maxTokens(500) // The maximum response length + .temperature(0.5F) // Using temperature for randomness control + //.topP(0.9F) // Alternative: use topP instead of temperature + ).build(); + + // Step 5: Set up the stream handler + // The stream handler processes chunks of the response as they arrive + // - onContentBlockDelta: Processes each text chunk + // - onError: Handles any errors during streaming + var streamHandler = ConverseStreamResponseHandler.builder() + .subscriber(ConverseStreamResponseHandler.Visitor.builder() 
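+                        // Each ContentBlockDelta event carries the next fragment of generated text,
+                        // which is printed immediately to produce the streaming output.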
+ .onContentBlockDelta(chunk -> { + System.out.print(chunk.delta().text()); + System.out.flush(); // Ensure immediate output of each chunk + }).build()) + .onError(err -> System.err.printf("Can't invoke '%s': %s", modelId, err.getMessage())) + .build(); + + // Step 6: Send the streaming request and process the response + // - Send the request to the model + // - Attach the handler to process response chunks as they arrive + // - Handle any errors during streaming + try { + client.converseStream(request, streamHandler).get(); + + } catch (ExecutionException | InterruptedException e) { + System.err.printf("Can't invoke '%s': %s", modelId, e.getCause().getMessage()); + } + } + + public static void main(String[] args) { + converseStream(); + } +} + +// snippet-end:[bedrock-runtime.java2.ConverseStream_AmazonNovaText] \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/AbstractModelTest.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/AbstractModelTest.java new file mode 100644 index 00000000000..02861891fe6 --- /dev/null +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/AbstractModelTest.java @@ -0,0 +1,64 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package actions; + +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; + +import java.lang.reflect.InvocationTargetException; +import java.util.Objects; +import java.util.stream.Stream; + +import static org.junit.jupiter.api.Assertions.*; + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +public abstract class AbstractModelTest { + + /** + * Provide the model classes to test. + * Each concrete test class must implement this method. + */ + protected abstract Stream modelProvider(); + + /** + * Provide the method name to test. + * Each concrete test class must implement this method. + */ + protected abstract String getMethodName(); + + /** + * Validates the result of the model invocation. + * Can be overridden by concrete classes if needed. 
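+     * Accepts {@code String} results (which must contain non-empty text) and {@code byte[]}
+     * results (which must contain at least one byte); any other result type fails the test.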
+ */ + protected void validateResult(Object result, String modelName) { + if (result instanceof String) { + assertFalse(Objects.requireNonNull((String) result).trim().isEmpty(), + "Empty result from " + modelName); + } else if (result instanceof byte[]) { + assertNotEquals(0, Objects.requireNonNull((byte[]) result).length, + "Empty result from " + modelName); + } else { + fail("Unexpected result type from " + modelName + ": " + result.getClass()); + } + } + + @ParameterizedTest(name = "Test {0}") + @MethodSource("modelProvider") + void testModel(ModelTest model) { + try { + Object result = model.cls().getMethod(getMethodName()).invoke(null); + validateResult(result, model.name()); + + } catch (InvocationTargetException e) { + Throwable cause = e.getCause(); + fail("Test failed for " + model.name() + ": " + cause.getMessage(), cause); + } catch (NoSuchMethodException | IllegalAccessException e) { + fail("Test configuration error for " + model.name() + ": " + e.getMessage(), e); + } + } + + protected record ModelTest(String name, Class cls) { + } +} \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/IntegrationTestBase.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/IntegrationTestBase.java deleted file mode 100644 index ff3c70bbbeb..00000000000 --- a/javav2/example_code/bedrock-runtime/src/test/java/actions/IntegrationTestBase.java +++ /dev/null @@ -1,19 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -// SPDX-License-Identifier: Apache-2.0 - -package actions; - -import org.junit.jupiter.api.Tag; -import org.junit.jupiter.api.TestInstance; - -import static org.junit.jupiter.api.Assertions.assertFalse; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -@Tag("IntegrationTest") -@TestInstance(TestInstance.Lifecycle.PER_METHOD) -public abstract class IntegrationTestBase { - protected void assertNotNullOrEmpty(String string) { - assertNotNull(string); - assertFalse(string.trim().isEmpty()); - } -} diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java index 889de5b62d7..861379eb83b 100644 --- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverse.java @@ -3,36 +3,21 @@ package actions; -import org.junit.jupiter.api.Test; +import java.util.stream.Stream; -public class TestConverse extends IntegrationTestBase { - @Test - void testJurassic2() { - String result = com.example.bedrockruntime.models.ai21LabsJurassic2.Converse.converse(); - assertNotNullOrEmpty(result); +public class TestConverse extends AbstractModelTest { + protected String getMethodName() { + return "converse"; } - @Test - void testTitanText() { - String result = com.example.bedrockruntime.models.amazonTitanText.Converse.converse(); - assertNotNullOrEmpty(result); + protected Stream modelProvider() { + return Stream.of( + new ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.Converse.class), + new ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Converse.class), + new ModelTest("Jurassic2", com.example.bedrockruntime.models.ai21LabsJurassic2.Converse.class), + new ModelTest("Mistral", com.example.bedrockruntime.models.mistral.Converse.class), + new ModelTest("NovaText", com.example.bedrockruntime.models.amazon.nova.text.Converse.class), + new ModelTest("TitanText", 
com.example.bedrockruntime.models.amazonTitanText.Converse.class) + ); } - - @Test - void testClaude() { - String result = com.example.bedrockruntime.models.anthropicClaude.Converse.converse(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommand() { - String result = com.example.bedrockruntime.models.cohereCommand.Converse.converse(); - assertNotNullOrEmpty(result); - } - - @Test - void testMistral() { - String result = com.example.bedrockruntime.models.mistral.Converse.converse(); - assertNotNullOrEmpty(result); - } -} +} \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java index 6b36a7e1b5a..ea814d33c7d 100644 --- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestConverseAsync.java @@ -3,36 +3,21 @@ package actions; -import org.junit.jupiter.api.Test; +import java.util.stream.Stream; -public class TestConverseAsync extends IntegrationTestBase { - @Test - void testJurassic2() { - String result = com.example.bedrockruntime.models.ai21LabsJurassic2.ConverseAsync.converseAsync(); - assertNotNullOrEmpty(result); +public class TestConverseAsync extends AbstractModelTest { + protected String getMethodName() { + return "converseAsync"; } - @Test - void testTitanText() { - String result = com.example.bedrockruntime.models.amazonTitanText.ConverseAsync.converseAsync(); - assertNotNullOrEmpty(result); + protected Stream modelProvider() { + return Stream.of( + new TestConverseAsync.ModelTest("Jurassic2", com.example.bedrockruntime.models.ai21LabsJurassic2.ConverseAsync.class), + new TestConverseAsync.ModelTest("NovaText", com.example.bedrockruntime.models.amazon.nova.text.ConverseAsync.class), + new TestConverseAsync.ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.ConverseAsync.class), + new TestConverseAsync.ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.ConverseAsync.class), + new TestConverseAsync.ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.ConverseAsync.class), + new TestConverseAsync.ModelTest("Mistral", com.example.bedrockruntime.models.mistral.ConverseAsync.class) + ); } - - @Test - void testClaude() { - String result = com.example.bedrockruntime.models.anthropicClaude.ConverseAsync.converseAsync(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommand() { - String result = com.example.bedrockruntime.models.cohereCommand.ConverseAsync.converseAsync(); - assertNotNullOrEmpty(result); - } - - @Test - void testMistral() { - String result = com.example.bedrockruntime.models.mistral.ConverseAsync.converseAsync(); - assertNotNullOrEmpty(result); - } -} +} \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java new file mode 100644 index 00000000000..3ed2cf58f77 --- /dev/null +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestImageGeneration.java @@ -0,0 +1,22 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +package actions; + +import java.util.stream.Stream; + +public class TestImageGeneration extends AbstractModelTest { + @Override + protected String getMethodName() { + return "invokeModel"; + } + + @Override + protected Stream modelProvider() { + return Stream.of( + new TestInvokeModel.ModelTest("NovaCanvas", com.example.bedrockruntime.models.amazon.nova.canvas.InvokeModel.class), + new TestInvokeModel.ModelTest("StableDiffusion", com.example.bedrockruntime.models.stabilityAi.InvokeModel.class), + new TestInvokeModel.ModelTest("TitanImage", com.example.bedrockruntime.models.amazonTitanText.InvokeModel.class) + ); + } +} \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java index e446933d547..b80d83ea6ea 100644 --- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModel.java @@ -3,66 +3,23 @@ package actions; -import org.junit.jupiter.api.Test; - -public class TestInvokeModel extends IntegrationTestBase { - @Test - void testJurassic2() { - String result = com.example.bedrockruntime.models.ai21LabsJurassic2.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testTitanImage() { - String result = com.example.bedrockruntime.models.amazonTitanImage.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testTitanText() { - String result = com.example.bedrockruntime.models.amazonTitanText.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testTitanTextEmbeddings() { - String result = com.example.bedrockruntime.models.amazonTitanTextEmbeddings.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testClaude() { - String result = com.example.bedrockruntime.models.anthropicClaude.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommand() { - String result = com.example.bedrockruntime.models.cohereCommand.Command_InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommandR() { - String result = com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testLlama3() { - String result = com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testMistral() { - String result = com.example.bedrockruntime.models.mistral.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } - - @Test - void testStableDiffusion() { - String result = com.example.bedrockruntime.models.stabilityAi.InvokeModel.invokeModel(); - assertNotNullOrEmpty(result); - } -} +import java.util.stream.Stream; + +public class TestInvokeModel extends AbstractModelTest { + protected String getMethodName() { + return "invokeModel"; + } + + protected Stream modelProvider() { + return Stream.of( + new TestInvokeModel.ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.InvokeModel.class), + new TestInvokeModel.ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Command_InvokeModel.class), + new TestInvokeModel.ModelTest("CohereCommandR", com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModel.class), + new TestInvokeModel.ModelTest("Jurassic2", 
com.example.bedrockruntime.models.ai21LabsJurassic2.InvokeModel.class), + new TestInvokeModel.ModelTest("Llama", com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModel.class), + new TestInvokeModel.ModelTest("Mistral", com.example.bedrockruntime.models.mistral.InvokeModel.class), + new TestInvokeModel.ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.InvokeModel.class), + new TestInvokeModel.ModelTest("TitanTextEmbeddings", com.example.bedrockruntime.models.amazonTitanText.InvokeModel.class) + ); + } +} \ No newline at end of file diff --git a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java index 76cb1983dea..4fc4669e5e8 100644 --- a/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java +++ b/javav2/example_code/bedrock-runtime/src/test/java/actions/TestInvokeModelWithResponseStream.java @@ -3,45 +3,21 @@ package actions; -import org.junit.jupiter.api.Test; +import java.util.stream.Stream; -import java.util.concurrent.ExecutionException; - -public class TestInvokeModelWithResponseStream extends IntegrationTestBase { - - @Test - void testTitanText() throws ExecutionException, InterruptedException { - String result = com.example.bedrockruntime.models.amazonTitanText.InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); - } - - @Test - void testClaude() throws ExecutionException, InterruptedException { - String result = com.example.bedrockruntime.models.anthropicClaude.InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommand() throws ExecutionException, InterruptedException { - String result = com.example.bedrockruntime.models.cohereCommand.Command_InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); - } - - @Test - void testCohereCommandR() throws ExecutionException, InterruptedException { - String result = com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); - } - - @Test - void testLlama3() { - String result = com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); +public class TestInvokeModelWithResponseStream extends AbstractModelTest { + protected String getMethodName() { + return "invokeModelWithResponseStream"; } - @Test - void testMistral() throws ExecutionException, InterruptedException { - String result = com.example.bedrockruntime.models.mistral.InvokeModelWithResponseStream.invokeModelWithResponseStream(); - assertNotNullOrEmpty(result); + protected Stream modelProvider() { + return Stream.of( + new TestInvokeModel.ModelTest("Claude", com.example.bedrockruntime.models.anthropicClaude.InvokeModelWithResponseStream.class), + new TestInvokeModel.ModelTest("CohereCommand", com.example.bedrockruntime.models.cohereCommand.Command_InvokeModelWithResponseStream.class), + new TestInvokeModel.ModelTest("CohereCommandR", com.example.bedrockruntime.models.cohereCommand.Command_R_InvokeModelWithResponseStream.class), + new TestInvokeModel.ModelTest("Llama", com.example.bedrockruntime.models.metaLlama.Llama3_InvokeModelWithResponseStream.class), + new TestInvokeModel.ModelTest("Mistral", 
com.example.bedrockruntime.models.mistral.InvokeModelWithResponseStream.class), + new TestInvokeModel.ModelTest("TitanText", com.example.bedrockruntime.models.amazonTitanText.InvokeModelWithResponseStream.class) + ); } } diff --git a/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java b/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java index 3eda004aac4..2787bf67c9a 100644 --- a/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java +++ b/javav2/example_code/bedrock-runtime/src/test/java/scenarios/TestAmazonTitanTextScenarios.java @@ -3,14 +3,13 @@ package scenarios; -import actions.IntegrationTestBase; import org.junit.jupiter.api.Test; import static com.example.bedrockruntime.models.amazonTitanText.TextScenarios.invokeWithConversation; import static com.example.bedrockruntime.models.amazonTitanText.TextScenarios.invokeWithSystemPrompt; import static org.junit.jupiter.api.Assertions.assertFalse; -class TestAmazonTitanTextScenarios extends IntegrationTestBase { +class TestAmazonTitanTextScenarios { @Test void invokeWithSystemPromptScenario() { diff --git a/javav2/example_code/entityresolution/.gitignore b/javav2/example_code/entityresolution/.gitignore new file mode 100644 index 00000000000..5ff6309b719 --- /dev/null +++ b/javav2/example_code/entityresolution/.gitignore @@ -0,0 +1,38 @@ +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ + +### IntelliJ IDEA ### +.idea/modules.xml +.idea/jarRepositories.xml +.idea/compiler.xml +.idea/libraries/ +*.iws +*.iml +*.ipr + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store \ No newline at end of file diff --git a/javav2/example_code/entityresolution/README.md b/javav2/example_code/entityresolution/README.md new file mode 100644 index 00000000000..26d4ccfefa5 --- /dev/null +++ b/javav2/example_code/entityresolution/README.md @@ -0,0 +1,123 @@ +# AWS Entity Resolution code examples for the SDK for Java 2.x + +## Overview + +Shows how to use the AWS SDK for Java 2.x to work with AWS Entity Resolution. + + + + +_AWS Entity Resolution helps organizations extract, link, and organize information from multiple data sources._ + +## ⚠ Important + +* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/). +* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + + + + +## Code examples + +### Prerequisites + +For prerequisites, see the [README](../../README.md#Prerequisites) in the `javav2` folder. 
+ + + + + +### Get started + +- [Hello AWS Entity Resolution](src/main/java/com/example/entity/HelloEntityResoultion.java#L19) (`listMatchingWorkflows`) + + +### Basics + +Code examples that show you how to perform the essential operations within a service. + +- [Learn the basics](src/main/java/com/example/entity/scenario/EntityResScenario.java) + + +### Single actions + +Code excerpts that show you how to call individual service functions. + +- [CheckWorkflowStatus](src/main/java/com/example/entity/scenario/EntityResActions.java#L393) +- [CreateMatchingWorkflow](src/main/java/com/example/entity/scenario/EntityResActions.java#L431) +- [CreateSchemaMapping](src/main/java/com/example/entity/scenario/EntityResActions.java#L232) +- [DeleteMatchingWorkflow](src/main/java/com/example/entity/scenario/EntityResActions.java#L198) +- [DeleteSchemaMapping](src/main/java/com/example/entity/scenario/EntityResActions.java#L139) +- [GetMatchingJob](src/main/java/com/example/entity/scenario/EntityResActions.java#L319) +- [GetSchemaMapping](src/main/java/com/example/entity/scenario/EntityResActions.java#L282) +- [ListSchemaMappings](src/main/java/com/example/entity/scenario/EntityResActions.java#L175) +- [StartMatchingJob](src/main/java/com/example/entity/scenario/EntityResActions.java#L356) +- [TagEntityResource](src/main/java/com/example/entity/scenario/EntityResActions.java#L518) + + + + + +## Run the examples + +### Instructions + + + + + +#### Hello AWS Entity Resolution + +This example shows you how to get started using AWS Entity Resolution. + + +#### Learn the basics + +This example shows you how to do the following: + +- Create Schema Mapping. +- Create an AWS Entity Resolution workflow. +- Start the matching job for the workflow. +- Get details for the matching job. +- Get Schema Mapping. +- List all Schema Mappings. +- Tag the Schema Mapping resource. +- Delete the AWS Entity Resolution Assets. + + + + + + + + + +### Tests + +⚠ Running tests might result in charges to your AWS account. + + +To find instructions for running these tests, see the [README](../../README.md#Tests) +in the `javav2` folder. + + + + + + +## Additional resources + +- [AWS Entity Resolution User Guide](https://docs.aws.amazon.com/entityresolution/latest/userguide/what-is-service.html) +- [AWS Entity Resolution API Reference](https://docs.aws.amazon.com/entityresolution/latest/apireference/Welcome.html) +- [SDK for Java 2.x AWS Entity Resolution reference](https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/services/entityresolution/package-summary.html) + + + + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/example_code/entityresolution/pom.xml b/javav2/example_code/entityresolution/pom.xml new file mode 100644 index 00000000000..a70292a446b --- /dev/null +++ b/javav2/example_code/entityresolution/pom.xml @@ -0,0 +1,132 @@ + + + 4.0.0 + + org.example + entityresolution + 1.0-SNAPSHOT + + UTF-8 + 17 + 17 + 17 + + + + + org.apache.maven.plugins + maven-surefire-plugin + 2.22.1 + + IntegrationTest + + + + + + + + software.amazon.awssdk + bom + 2.29.45 + pom + import + + + org.apache.logging.log4j + log4j-bom + 2.23.1 + pom + import + + + + + + org.junit.jupiter + junit-jupiter-api + 5.9.2 + test + + + org.junit.jupiter + junit-jupiter-engine + 5.9.2 + test + + + software.amazon.awssdk + secretsmanager + + + com.google.code.gson + gson + 2.10.1 + + + org.junit.platform + junit-platform-commons + 1.9.2 + + + org.junit.platform + junit-platform-launcher + 1.9.2 + test + + + software.amazon.awssdk + entityresolution + + + com.opencsv + opencsv + 5.7.1 + + + software.amazon.awssdk + s3 + + + + org.fusesource.jansi + jansi + 2.4.0 + + + software.amazon.awssdk + netty-nio-client + + + software.amazon.awssdk + cloudformation + + + software.amazon.awssdk + sso + + + software.amazon.awssdk + ssooidc + + + org.apache.logging.log4j + log4j-core + + + org.slf4j + slf4j-api + 2.0.13 + + + org.apache.logging.log4j + log4j-slf4j2-impl + + + org.apache.logging.log4j + log4j-1.2-api + + + \ No newline at end of file diff --git a/javav2/example_code/entityresolution/src/main/java/com/example/entity/HelloEntityResoultion.java b/javav2/example_code/entityresolution/src/main/java/com/example/entity/HelloEntityResoultion.java new file mode 100644 index 00000000000..f5dcbc3aeec --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/java/com/example/entity/HelloEntityResoultion.java @@ -0,0 +1,94 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.entity; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; +import software.amazon.awssdk.core.retry.RetryMode; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.entityresolution.EntityResolutionAsyncClient; +import software.amazon.awssdk.services.entityresolution.model.ListMatchingWorkflowsRequest; +import software.amazon.awssdk.services.entityresolution.paginators.ListMatchingWorkflowsPublisher; +import java.time.Duration; +import java.util.concurrent.CompletableFuture; + +// snippet-start:[entityres.java2_hello.main] +/** + * Before running this Java V2 code example, set up your development + * environment, including your credentials. 
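+ * This example relies on the default credentials provider chain and creates its
+ * asynchronous client in the us-east-1 Region.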
+ * + * For more information, see the following documentation topic: + * + * https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html + */ +public class HelloEntityResoultion { + + private static final Logger logger = LoggerFactory.getLogger(HelloEntityResoultion.class); + + private static EntityResolutionAsyncClient entityResolutionAsyncClient; + public static void main(String[] args) { + listMatchingWorkflows(); + } + + public static EntityResolutionAsyncClient getResolutionAsyncClient() { + if (entityResolutionAsyncClient == null) { + /* + The `NettyNioAsyncHttpClient` class is part of the AWS SDK for Java, version 2, + and it is designed to provide a high-performance, asynchronous HTTP client for interacting with AWS services. + It uses the Netty framework to handle the underlying network communication and the Java NIO API to + provide a non-blocking, event-driven approach to HTTP requests and responses. + */ + + SdkAsyncHttpClient httpClient = NettyNioAsyncHttpClient.builder() + .maxConcurrency(50) // Adjust as needed. + .connectionTimeout(Duration.ofSeconds(60)) // Set the connection timeout. + .readTimeout(Duration.ofSeconds(60)) // Set the read timeout. + .writeTimeout(Duration.ofSeconds(60)) // Set the write timeout. + .build(); + + ClientOverrideConfiguration overrideConfig = ClientOverrideConfiguration.builder() + .apiCallTimeout(Duration.ofMinutes(2)) // Set the overall API call timeout. + .apiCallAttemptTimeout(Duration.ofSeconds(90)) // Set the individual call attempt timeout. + .retryStrategy(RetryMode.STANDARD) + .build(); + + entityResolutionAsyncClient = EntityResolutionAsyncClient.builder() + .region(Region.US_EAST_1) + .httpClient(httpClient) + .overrideConfiguration(overrideConfig) + .build(); + } + return entityResolutionAsyncClient; + } + + /** + * Lists all matching workflows using an asynchronous paginator. + *

+ * This method requests a paginated list of matching workflows from the + * AWS Entity Resolution service and logs the names of the retrieved workflows. + * It uses an asynchronous approach with a paginator and waits for the operation + * to complete using {@code CompletableFuture#join()}. + *

+ */ + public static void listMatchingWorkflows() { + ListMatchingWorkflowsRequest request = ListMatchingWorkflowsRequest.builder().build(); + + ListMatchingWorkflowsPublisher paginator = + getResolutionAsyncClient().listMatchingWorkflowsPaginator(request); + + // Iterate through the paginated results asynchronously + CompletableFuture future = paginator.subscribe(response -> { + response.workflowSummaries().forEach(workflow -> + logger.info("Matching Workflow Name: " + workflow.workflowName()) + ); + }); + + // Wait for the asynchronous operation to complete + future.join(); + } +} +// snippet-end:[entityres.java2_hello.main] diff --git a/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/CloudFormationHelper.java b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/CloudFormationHelper.java new file mode 100644 index 00000000000..12f48a586bd --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/CloudFormationHelper.java @@ -0,0 +1,188 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.entity.scenario; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; +import software.amazon.awssdk.core.retry.RetryMode; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; +import software.amazon.awssdk.services.cloudformation.CloudFormationAsyncClient; +import software.amazon.awssdk.services.cloudformation.model.Capability; +import software.amazon.awssdk.services.cloudformation.model.CloudFormationException; +import software.amazon.awssdk.services.cloudformation.model.DescribeStacksRequest; +import software.amazon.awssdk.services.cloudformation.model.DescribeStacksResponse; +import software.amazon.awssdk.services.cloudformation.model.Output; +import software.amazon.awssdk.services.cloudformation.model.Stack; +import software.amazon.awssdk.services.cloudformation.waiters.CloudFormationAsyncWaiter; +import software.amazon.awssdk.services.s3.S3AsyncClient; +import software.amazon.awssdk.services.s3.model.DeleteObjectResponse; + +import java.io.IOException; +import java.net.URISyntaxException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.time.Duration; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.stream.Collectors; + +public class CloudFormationHelper { + private static final String CFN_TEMPLATE = "template.yaml"; + private static final Logger logger = LoggerFactory.getLogger(CloudFormationHelper.class); + + private static CloudFormationAsyncClient cloudFormationClient; + + public static void main(String[] args) { + emptyS3Bucket(args[0]); + } + + private static CloudFormationAsyncClient getCloudFormationClient() { + if (cloudFormationClient == null) { + SdkAsyncHttpClient httpClient = NettyNioAsyncHttpClient.builder() + .maxConcurrency(100) + .connectionTimeout(Duration.ofSeconds(60)) + .readTimeout(Duration.ofSeconds(60)) + .writeTimeout(Duration.ofSeconds(60)) + .build(); + + ClientOverrideConfiguration overrideConfig = ClientOverrideConfiguration.builder() + .apiCallTimeout(Duration.ofMinutes(2)) + .apiCallAttemptTimeout(Duration.ofSeconds(90)) + .retryStrategy(RetryMode.STANDARD) + .build(); + + 
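+            // Build the CloudFormation async client with the shared Netty HTTP client and the
+            // timeout/retry settings configured above. No Region is set explicitly, so the SDK
+            // resolves it from the default region provider chain.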
cloudFormationClient = CloudFormationAsyncClient.builder() + .httpClient(httpClient) + .overrideConfiguration(overrideConfig) + .build(); + } + return cloudFormationClient; + } + + public static void deployCloudFormationStack(String stackName) { + String templateBody; + boolean doesExist = describeStack(stackName); + if (!doesExist) { + try { + ClassLoader classLoader = Thread.currentThread().getContextClassLoader(); + Path filePath = Paths.get(classLoader.getResource(CFN_TEMPLATE).toURI()); + templateBody = Files.readString(filePath); + } catch (IOException | URISyntaxException e) { + throw new RuntimeException(e); + } + + getCloudFormationClient().createStack(b -> b.stackName(stackName) + .templateBody(templateBody) + .capabilities(Capability.CAPABILITY_IAM)) + .whenComplete((csr, t) -> { + if (csr != null) { + System.out.println("Stack creation requested, ARN is " + csr.stackId()); + try (CloudFormationAsyncWaiter waiter = getCloudFormationClient().waiter()) { + waiter.waitUntilStackCreateComplete(request -> request.stackName(stackName)) + .whenComplete((dsr, th) -> { + if (th != null) { + System.out.println("Error waiting for stack creation: " + th.getMessage()); + } else { + dsr.matched().response().orElseThrow(() -> new RuntimeException("Failed to deploy")); + System.out.println("Stack created successfully"); + } + }).join(); + } + } else { + System.out.format("Error creating stack: " + t.getMessage(), t); + throw new RuntimeException(t.getCause().getMessage(), t); + } + }).join(); + } else { + logger.info("{} stack already exists", stackName); + } + } + + // Check to see if the Stack exists before deploying it + public static Boolean describeStack(String stackName) { + try { + CompletableFuture future = getCloudFormationClient().describeStacks(); + DescribeStacksResponse stacksResponse = (DescribeStacksResponse) future.join(); + List stacks = stacksResponse.stacks(); + for (Stack myStack : stacks) { + if (myStack.stackName().compareTo(stackName) == 0) { + return true; + } + } + } catch (CloudFormationException e) { + System.err.println(e.getMessage()); + } + return false; + } + + public static void destroyCloudFormationStack(String stackName) { + getCloudFormationClient().deleteStack(b -> b.stackName(stackName)) + .whenComplete((dsr, t) -> { + if (dsr != null) { + System.out.println("Delete stack requested ...."); + try (CloudFormationAsyncWaiter waiter = getCloudFormationClient().waiter()) { + waiter.waitUntilStackDeleteComplete(request -> request.stackName(stackName)) + .whenComplete((waiterResponse, throwable) -> + System.out.println("Stack deleted successfully.")) + .join(); + } + } else { + System.out.format("Error deleting stack: " + t.getMessage(), t); + throw new RuntimeException(t.getCause().getMessage(), t); + } + }).join(); + } + + public static CompletableFuture> getStackOutputsAsync(String stackName) { + CloudFormationAsyncClient cloudFormationAsyncClient = getCloudFormationClient(); + + DescribeStacksRequest describeStacksRequest = DescribeStacksRequest.builder() + .stackName(stackName) + .build(); + + return cloudFormationAsyncClient.describeStacks(describeStacksRequest) + .handle((describeStacksResponse, throwable) -> { + if (throwable != null) { + throw new RuntimeException("Failed to get stack outputs for: " + stackName, throwable); + } + + // Process the result + if (describeStacksResponse.stacks().isEmpty()) { + throw new RuntimeException("Stack not found: " + stackName); + } + + Stack stack = describeStacksResponse.stacks().get(0); + Map outputs = new 
HashMap<>(); + for (Output output : stack.outputs()) { + outputs.put(output.outputKey(), output.outputValue()); + } + + return outputs; + }); + } + + public static void emptyS3Bucket(String bucketName) { + S3AsyncClient s3Client = S3AsyncClient.builder().build(); + + s3Client.listObjectsV2(req -> req.bucket(bucketName)) + .thenCompose(response -> { + List> deleteFutures = response.contents().stream() + .map(s3Object -> s3Client.deleteObject(req -> req + .bucket(bucketName) + .key(s3Object.key()))) + .collect(Collectors.toList()); + + return CompletableFuture.allOf(deleteFutures.toArray(new CompletableFuture[0])); + }) + .join(); + + s3Client.close(); + } +} + diff --git a/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResActions.java b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResActions.java new file mode 100644 index 00000000000..b29a3cbec84 --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResActions.java @@ -0,0 +1,764 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.entity.scenario; + +import com.opencsv.CSVReader; +import com.opencsv.exceptions.CsvException; +import org.fusesource.jansi.AnsiConsole; +import software.amazon.awssdk.core.async.AsyncRequestBody; +import software.amazon.awssdk.core.async.AsyncResponseTransformer; +import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; +import software.amazon.awssdk.core.retry.RetryMode; +import software.amazon.awssdk.http.async.SdkAsyncHttpClient; +import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; +import software.amazon.awssdk.regions.Region; +import software.amazon.awssdk.services.entityresolution.EntityResolutionAsyncClient; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.services.entityresolution.model.ConflictException; +import software.amazon.awssdk.services.entityresolution.model.CreateMatchingWorkflowRequest; +import software.amazon.awssdk.services.entityresolution.model.CreateMatchingWorkflowResponse; +import software.amazon.awssdk.services.entityresolution.model.CreateSchemaMappingRequest; +import software.amazon.awssdk.services.entityresolution.model.CreateSchemaMappingResponse; +import software.amazon.awssdk.services.entityresolution.model.DeleteMatchingWorkflowRequest; +import software.amazon.awssdk.services.entityresolution.model.DeleteMatchingWorkflowResponse; +import software.amazon.awssdk.services.entityresolution.model.DeleteSchemaMappingRequest; +import software.amazon.awssdk.services.entityresolution.model.DeleteSchemaMappingResponse; +import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobRequest; +import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobResponse; +import software.amazon.awssdk.services.entityresolution.model.GetSchemaMappingRequest; +import software.amazon.awssdk.services.entityresolution.model.GetSchemaMappingResponse; +import software.amazon.awssdk.services.entityresolution.model.InputSource; +import software.amazon.awssdk.services.entityresolution.model.JobMetrics; +import software.amazon.awssdk.services.entityresolution.model.ListSchemaMappingsRequest; +import software.amazon.awssdk.services.entityresolution.model.OutputAttribute; +import software.amazon.awssdk.services.entityresolution.model.OutputSource; +import 
software.amazon.awssdk.services.entityresolution.model.ResolutionTechniques; +import software.amazon.awssdk.services.entityresolution.model.ResolutionType; +import software.amazon.awssdk.services.entityresolution.model.ResourceNotFoundException; +import software.amazon.awssdk.services.entityresolution.model.SchemaAttributeType; +import software.amazon.awssdk.services.entityresolution.model.SchemaInputAttribute; +import software.amazon.awssdk.services.entityresolution.model.StartMatchingJobRequest; +import software.amazon.awssdk.services.entityresolution.model.TagResourceResponse; +import software.amazon.awssdk.services.entityresolution.model.ValidationException; +import software.amazon.awssdk.services.entityresolution.paginators.ListSchemaMappingsPublisher; +import software.amazon.awssdk.services.s3.S3AsyncClient; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.ListObjectsV2Request; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; +import software.amazon.awssdk.services.s3.model.S3Object; +import software.amazon.awssdk.services.entityresolution.model.TagResourceRequest; + +import java.io.IOException; +import java.io.StringReader; +import java.time.Duration; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.CompletionException; +import java.util.stream.Collectors; +import java.util.stream.IntStream; + +import static org.fusesource.jansi.Ansi.ansi; + +// snippet-start:[entityres.java2_actions.main] +public class EntityResActions { + + private static final String PREFIX = "eroutput/"; + private static final Logger logger = LoggerFactory.getLogger(EntityResActions.class); + + private static EntityResolutionAsyncClient entityResolutionAsyncClient; + + private static S3AsyncClient s3AsyncClient; + + public static EntityResolutionAsyncClient getResolutionAsyncClient() { + if (entityResolutionAsyncClient == null) { + /* + The `NettyNioAsyncHttpClient` class is part of the AWS SDK for Java, version 2, + and it is designed to provide a high-performance, asynchronous HTTP client for interacting with AWS services. + It uses the Netty framework to handle the underlying network communication and the Java NIO API to + provide a non-blocking, event-driven approach to HTTP requests and responses. + */ + + SdkAsyncHttpClient httpClient = NettyNioAsyncHttpClient.builder() + .maxConcurrency(50) // Adjust as needed. + .connectionTimeout(Duration.ofSeconds(60)) // Set the connection timeout. + .readTimeout(Duration.ofSeconds(60)) // Set the read timeout. + .writeTimeout(Duration.ofSeconds(60)) // Set the write timeout. + .build(); + + ClientOverrideConfiguration overrideConfig = ClientOverrideConfiguration.builder() + .apiCallTimeout(Duration.ofMinutes(2)) // Set the overall API call timeout. + .apiCallAttemptTimeout(Duration.ofSeconds(90)) // Set the individual call attempt timeout. 
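+                    // Retry transient and throttling failures using the SDK's standard retry mode.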
+ .retryStrategy(RetryMode.STANDARD) + .build(); + + entityResolutionAsyncClient = EntityResolutionAsyncClient.builder() + .region(Region.US_EAST_1) + .httpClient(httpClient) + .overrideConfiguration(overrideConfig) + .build(); + } + return entityResolutionAsyncClient; + } + + public static S3AsyncClient getS3AsyncClient() { + if (s3AsyncClient == null) { + /* + The `NettyNioAsyncHttpClient` class is part of the AWS SDK for Java, version 2, + and it is designed to provide a high-performance, asynchronous HTTP client for interacting with AWS services. + It uses the Netty framework to handle the underlying network communication and the Java NIO API to + provide a non-blocking, event-driven approach to HTTP requests and responses. + */ + + SdkAsyncHttpClient httpClient = NettyNioAsyncHttpClient.builder() + .maxConcurrency(50) // Adjust as needed. + .connectionTimeout(Duration.ofSeconds(60)) // Set the connection timeout. + .readTimeout(Duration.ofSeconds(60)) // Set the read timeout. + .writeTimeout(Duration.ofSeconds(60)) // Set the write timeout. + .build(); + + ClientOverrideConfiguration overrideConfig = ClientOverrideConfiguration.builder() + .apiCallTimeout(Duration.ofMinutes(2)) // Set the overall API call timeout. + .apiCallAttemptTimeout(Duration.ofSeconds(90)) // Set the individual call attempt timeout. + .retryStrategy(RetryMode.STANDARD) + .build(); + + s3AsyncClient = S3AsyncClient.builder() + .region(Region.US_EAST_1) + .httpClient(httpClient) + .overrideConfiguration(overrideConfig) + .build(); + } + return s3AsyncClient; + } + + // snippet-start:[entityres.java2_delete_mappings.main] + /** + * Deletes the schema mapping asynchronously. + * + * @param schemaName the name of the schema to delete + * @return a {@link CompletableFuture} that completes when the schema mapping is deleted successfully, + * or throws a {@link RuntimeException} if the deletion fails + */ + public CompletableFuture deleteSchemaMappingAsync(String schemaName) { + DeleteSchemaMappingRequest request = DeleteSchemaMappingRequest.builder() + .schemaName(schemaName) + .build(); + + return getResolutionAsyncClient().deleteSchemaMapping(request) + .whenComplete((response, exception) -> { + if (response != null) { + // Successfully deleted the schema mapping, log the success message. + logger.info("Schema mapping '{}' deleted successfully.", schemaName); + } else { + // Ensure exception is not null before accessing its cause. + if (exception == null) { + throw new CompletionException("An unknown error occurred while deleting the schema mapping.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The schema mapping was not found to delete: " + schemaName, cause); + } + + // Wrap other AWS exceptions in a CompletionException. + throw new CompletionException("Failed to delete schema mapping: " + schemaName, exception); + } + }); + } + // snippet-end:[entityres.java2_delete_mappings.main] + + // snippet-start:[entityres.java2_list_mappings.main] + /** + * Lists the schema mappings associated with the current AWS account. This method uses an asynchronous paginator to + * retrieve the schema mappings, and prints the name of each schema mapping to the console. 
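+     * The call blocks until every page has been consumed, because it waits on the
+     * paginator subscription with {@code CompletableFuture#join()}.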
+ */ + public void ListSchemaMappings() { + ListSchemaMappingsRequest mappingsRequest = ListSchemaMappingsRequest.builder() + .build(); + + ListSchemaMappingsPublisher paginator = getResolutionAsyncClient().listSchemaMappingsPaginator(mappingsRequest); + + // Iterate through the pages of results + CompletableFuture future = paginator.subscribe(response -> { + response.schemaList().forEach(schemaMapping -> + logger.info("Schema Mapping Name: " + schemaMapping.schemaName()) + ); + }); + + // Wait for the asynchronous operation to complete + future.join(); + } + // snippet-end:[entityres.java2_list_mappings.main] + + // snippet-start:[entityres.java2_delete_matching_workflow.main] + /** + * Asynchronously deletes a workflow with the specified name. + * + * @param workflowName the name of the workflow to be deleted + * @return a {@link CompletableFuture} that completes when the workflow has been deleted + * @throws RuntimeException if the deletion of the workflow fails + */ + public CompletableFuture deleteMatchingWorkflowAsync(String workflowName) { + DeleteMatchingWorkflowRequest request = DeleteMatchingWorkflowRequest.builder() + .workflowName(workflowName) + .build(); + + return getResolutionAsyncClient().deleteMatchingWorkflow(request) + .whenComplete((response, exception) -> { + if (response != null) { + logger.info("{} was deleted", workflowName ); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while deleting the workflow.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The workflow to delete was not found.", cause); + } + + // Wrap other AWS exceptions in a CompletionException. + throw new CompletionException("Failed to delete workflow: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_delete_matching_workflow.main] + + // snippet-start:[entityres.java2_create_schema.main] + /** + * Creates a schema mapping asynchronously. 
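+     * Schema names that start with "json" map the id, name, and email fields; any other
+     * schema name additionally maps a phone field as a provider ID. For example, a call such as
+     * {@code actions.createSchemaMappingAsync("csv-customers").join()} (a hypothetical schema
+     * name) produces a mapping that includes the phone attribute.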
+ * + * @param schemaName the name of the schema to create + * @return a {@link CompletableFuture} that represents the asynchronous creation of the schema mapping + */ + public CompletableFuture createSchemaMappingAsync(String schemaName) { + List schemaAttributes = null; + if (schemaName.startsWith("json")) { + schemaAttributes = List.of( + SchemaInputAttribute.builder().matchKey("id").fieldName("id").type(SchemaAttributeType.UNIQUE_ID).build(), + SchemaInputAttribute.builder().matchKey("name").fieldName("name").type(SchemaAttributeType.NAME).build(), + SchemaInputAttribute.builder().matchKey("email").fieldName("email").type(SchemaAttributeType.EMAIL_ADDRESS).build() + ); + } else { + schemaAttributes = List.of( + SchemaInputAttribute.builder().matchKey("id").fieldName("id").type(SchemaAttributeType.UNIQUE_ID).build(), + SchemaInputAttribute.builder().matchKey("name").fieldName("name").type(SchemaAttributeType.NAME).build(), + SchemaInputAttribute.builder().matchKey("email").fieldName("email").type(SchemaAttributeType.EMAIL_ADDRESS).build(), + SchemaInputAttribute.builder().fieldName("phone").type(SchemaAttributeType.PROVIDER_ID).subType("STRING").build() + ); + } + + CreateSchemaMappingRequest request = CreateSchemaMappingRequest.builder() + .schemaName(schemaName) + .mappedInputFields(schemaAttributes) + .build(); + + return getResolutionAsyncClient().createSchemaMapping(request) + .whenComplete((response, exception) -> { + if (response != null) { + logger.info("[{}] schema mapping Created Successfully!", schemaName); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while creating the schema mapping.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ConflictException) { + throw new CompletionException("A conflicting schema mapping already exists. Resolve conflicts before proceeding.", cause); + } + + // Wrap other AWS exceptions in a CompletionException. + throw new CompletionException("Failed to create schema mapping: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_create_schema.main] + + // snippet-start:[entityres.java2_get_schema_mapping.main] + /** + * Retrieves the schema mapping asynchronously. + * + * @param schemaName the name of the schema to retrieve the mapping for + * @return a {@link CompletableFuture} that completes with the {@link GetSchemaMappingResponse} when the operation + * is complete + * @throws RuntimeException if the schema mapping retrieval fails + */ + public CompletableFuture getSchemaMappingAsync(String schemaName) { + GetSchemaMappingRequest mappingRequest = GetSchemaMappingRequest.builder() + .schemaName(schemaName) + .build(); + + return getResolutionAsyncClient().getSchemaMapping(mappingRequest) + .whenComplete((response, exception) -> { + if (response != null) { + response.mappedInputFields().forEach(attribute -> + logger.info("Attribute Name: " + attribute.fieldName() + + ", Attribute Type: " + attribute.type().toString())); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while getting schema mapping.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The requested schema mapping was not found.", cause); + } + + // Wrap other exceptions in a CompletionException with the message. 
+ throw new CompletionException("Failed to get schema mapping: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_get_schema_mapping.main] + + // snippet-start:[entityres.java2_get_job.main] + /** + * Asynchronously retrieves a matching job based on the provided job ID and workflow name. + * + * @param jobId the ID of the job to retrieve + * @param workflowName the name of the workflow associated with the job + * @return a {@link CompletableFuture} that completes when the job information is available or an exception occurs + */ + public CompletableFuture getMatchingJobAsync(String jobId, String workflowName) { + GetMatchingJobRequest request = GetMatchingJobRequest.builder() + .jobId(jobId) + .workflowName(workflowName) + .build(); + + return getResolutionAsyncClient().getMatchingJob(request) + .whenComplete((response, exception) -> { + if (response != null) { + // Successfully fetched the matching job details, log the job status. + logger.info("Job status: " + response.status()); + logger.info("Job details: " + response.toString()); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while fetching the matching job.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The requested job could not be found.", cause); + } + + // Wrap other exceptions in a CompletionException with the message. + throw new CompletionException("Error fetching matching job: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_get_job.main] + + // snippet-start:[entityres.java2_start_job.main] + + /** + * Starts a matching job asynchronously for the specified workflow name. + * + * @param workflowName the name of the workflow for which to start the matching job + * @return a {@link CompletableFuture} that completes with the job ID of the started matching job, or an empty + * string if the operation fails + */ + public CompletableFuture startMatchingJobAsync(String workflowName) { + StartMatchingJobRequest jobRequest = StartMatchingJobRequest.builder() + .workflowName(workflowName) + .build(); + + return getResolutionAsyncClient().startMatchingJob(jobRequest) + .whenComplete((response, exception) -> { + if (response != null) { + String jobId = response.jobId(); + logger.info("Job ID: " + jobId); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while starting the job.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ConflictException) { + throw new CompletionException("The job is already running. Resolve conflicts before starting a new job.", cause); + } + + // Wrap other AWS exceptions in a CompletionException. + throw new CompletionException("Failed to start the job: " + exception.getMessage(), exception); + } + }) + .thenApply(response -> response != null ? response.jobId() : ""); + } + // snippet-end:[entityres.java2_start_job.main] + + // snippet-start:[entityres.java2_check_matching_workflow.main] + /** + * Checks the status of a workflow asynchronously. 
+ * + * @param jobId the ID of the job to check + * @param workflowName the name of the workflow to check + * @return a CompletableFuture that resolves to a boolean value indicating whether the workflow has completed + * successfully + */ + public CompletableFuture checkWorkflowStatusCompleteAsync(String jobId, String workflowName) { + GetMatchingJobRequest request = GetMatchingJobRequest.builder() + .jobId(jobId) + .workflowName(workflowName) + .build(); + + return getResolutionAsyncClient().getMatchingJob(request) + .whenComplete((response, exception) -> { + if (response != null) { + // Process the response and log the job status. + logger.info("Job status: " + response.status()); + } else { + // Ensure exception is not null before accessing its cause. + if (exception == null) { + throw new CompletionException("An unknown error occurred while checking job status.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The requested resource was not found while checking the job status.", cause); + } + + // Wrap other AWS exceptions in a CompletionException. + throw new CompletionException("Failed to check job status: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_check_matching_workflow.main] + + // snippet-start:[entityres.java2_create_matching_workflow.main] + /** + * Creates an asynchronous CompletableFuture to manage the creation of a matching workflow. + * + * @param roleARN the AWS IAM role ARN to be used for the workflow execution + * @param workflowName the name of the workflow to be created + * @param outputBucket the S3 bucket path where the workflow output will be stored + * @param jsonGlueTableArn the ARN of the Glue Data Catalog table to be used as the input source + * @param jsonErSchemaMappingName the name of the schema to be used for the input source + * @return a CompletableFuture that, when completed, will return the ARN of the created workflow + */ + public CompletableFuture createMatchingWorkflowAsync( + String roleARN + , String workflowName + , String outputBucket + , String jsonGlueTableArn + , String jsonErSchemaMappingName + , String csvGlueTableArn + , String csvErSchemaMappingName) { + + InputSource jsonInputSource = InputSource.builder() + .inputSourceARN(jsonGlueTableArn) + .schemaName(jsonErSchemaMappingName) + .applyNormalization(false) + .build(); + + InputSource csvInputSource = InputSource.builder() + .inputSourceARN(csvGlueTableArn) + .schemaName(csvErSchemaMappingName) + .applyNormalization(false) + .build(); + + OutputAttribute idOutputAttribute = OutputAttribute.builder() + .name("id") + .build(); + + OutputAttribute nameOutputAttribute = OutputAttribute.builder() + .name("name") + .build(); + + OutputAttribute emailOutputAttribute = OutputAttribute.builder() + .name("email") + .build(); + + OutputAttribute phoneOutputAttribute = OutputAttribute.builder() + .name("phone") + .build(); + + OutputSource outputSource = OutputSource.builder() + .outputS3Path("s3://" + outputBucket + "/eroutput") + .output(idOutputAttribute, nameOutputAttribute, emailOutputAttribute, phoneOutputAttribute) + .applyNormalization(false) + .build(); + + ResolutionTechniques resolutionType = ResolutionTechniques.builder() + .resolutionType(ResolutionType.ML_MATCHING) + .build(); + + CreateMatchingWorkflowRequest workflowRequest = CreateMatchingWorkflowRequest.builder() + .roleArn(roleARN) + .description("Created by using the AWS SDK for Java") + 
.workflowName(workflowName) + .inputSourceConfig(List.of(jsonInputSource, csvInputSource)) + .outputSourceConfig(List.of(outputSource)) + .resolutionTechniques(resolutionType) + .build(); + + return getResolutionAsyncClient().createMatchingWorkflow(workflowRequest) + .whenComplete((response, exception) -> { + if (response != null) { + logger.info("Workflow created successfully."); + } else { + Throwable cause = exception.getCause(); + if (cause instanceof ValidationException) { + throw new CompletionException("Invalid request: Please check input parameters.", cause); + } + + if (cause instanceof ConflictException) { + throw new CompletionException("A conflicting workflow already exists. Resolve conflicts before proceeding.", cause); + } + throw new CompletionException("Failed to create workflow: " + exception.getMessage(), exception); + } + }) + .thenApply(CreateMatchingWorkflowResponse::workflowArn); + } + // snippet-end:[entityres.java2_create_matching_workflow.main] + + // snippet-start:[entityres.java2_tag_resource.main] + /** + * Tags the specified schema mapping ARN. + * + * @param schemaMappingARN the ARN of the schema mapping to tag + */ + public CompletableFuture tagEntityResource(String schemaMappingARN) { + Map tags = new HashMap<>(); + tags.put("tag1", "tag1Value"); + tags.put("tag2", "tag2Value"); + + TagResourceRequest request = TagResourceRequest.builder() + .resourceArn(schemaMappingARN) + .tags(tags) + .build(); + + return getResolutionAsyncClient().tagResource(request) + .whenComplete((response, exception) -> { + if (response != null) { + // Successfully tagged the resource, log the success message. + logger.info("Successfully tagged the resource."); + } else { + if (exception == null) { + throw new CompletionException("An unknown error occurred while tagging the resource.", null); + } + + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("The resource to tag was not found.", cause); + } + throw new CompletionException("Failed to tag the resource: " + exception.getMessage(), exception); + } + }); + } + // snippet-end:[entityres.java2_tag_resource.main] + + // snippet-start:[entityres.java2_job_info.main] + public CompletableFuture getJobInfo(String workflowName, String jobId) { + return getResolutionAsyncClient().getMatchingJob(b -> b + .workflowName(workflowName) + .jobId(jobId)) + .whenComplete((response, exception) -> { + if (response != null) { + logger.info("Job metrics fetched successfully for jobId: " + jobId); + } else { + Throwable cause = exception.getCause(); + if (cause instanceof ResourceNotFoundException) { + throw new CompletionException("Invalid request: Job id was not found.", cause); + } + throw new CompletionException("Failed to fetch job info: " + exception.getMessage(), exception); + } + }) + .thenApply(response -> response.metrics()); // Extract job metrics + } + // snippet-end:[entityres.java2_job_info.main] + + /** + * Uploads data to an Amazon S3 bucket asynchronously. + * + * @param bucketName the name of the S3 bucket to upload the data to + * @param jsonData the JSON data to be uploaded + * @param csvData the CSV data to be uploaded + * @return a {@link CompletableFuture} representing both asynchronous operation of uploading the data + * @throws RuntimeException if an error occurs during the file upload + */ + + public void uploadInputData(String bucketName, String jsonData, String csvData) { + // Upload JSON data. 
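+        // The JSON and CSV objects are uploaded in parallel; CompletableFuture.allOf(...).join()
+        // below blocks until both uploads have completed.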
+ String jsonKey = "jsonData/data.json"; + PutObjectRequest jsonUploadRequest = PutObjectRequest.builder() + .bucket(bucketName) + .key(jsonKey) + .contentType("application/json") + .build(); + + CompletableFuture jsonUploadResponse = getS3AsyncClient().putObject(jsonUploadRequest, AsyncRequestBody.fromString(jsonData)); + + // Upload CSV data. + String csvKey = "csvData/data.csv"; + PutObjectRequest csvUploadRequest = PutObjectRequest.builder() + .bucket(bucketName) + .key(csvKey) + .contentType("text/csv") + .build(); + CompletableFuture csvUploadResponse = getS3AsyncClient().putObject(csvUploadRequest, AsyncRequestBody.fromString(csvData)); + + CompletableFuture.allOf(jsonUploadResponse, csvUploadResponse) + .whenComplete((result, ex) -> { + if (ex != null) { + // Wrap an AWS exception. + throw new CompletionException("Failed to upload files", ex); + } + }).join(); + + } + + /** + * Finds the latest file in the S3 bucket that starts with "run-" in any depth of subfolders + */ + private CompletableFuture findLatestMatchingFile(String bucketName) { + ListObjectsV2Request request = ListObjectsV2Request.builder() + .bucket(bucketName) + .prefix(PREFIX) // Searches within the given folder + .build(); + + return getS3AsyncClient().listObjectsV2(request) + .thenApply(response -> response.contents().stream() + .map(S3Object::key) + .filter(key -> key.matches(".*?/run-[0-9a-zA-Z\\-]+")) // Matches files like run-XXXXX in any subfolder + .max(String::compareTo) // Gets the latest file + .orElse(null)) + .whenComplete((result, exception) -> { + if (exception == null) { + if (result != null) { + logger.info("Latest matching file found: " + result); + } else { + logger.info("No matching files found."); + } + } else { + throw new CompletionException("Failed to find latest matching file: " + exception.getMessage(), exception); + } + }); + } + + /** + * Prints the data located in the file in the S3 bucket that starts with "run-" in any depth of subfolders + */ + public void printData(String bucketName) { + try { + // Find the latest file with "run-" prefix in any depth of subfolders. + String s3Key = findLatestMatchingFile(bucketName).join(); + if (s3Key == null) { + logger.error("No matching files found in S3."); + return; + } + + logger.info("Downloading file: " + s3Key); + + // Read CSV file as String. + String csvContent = readCSVFromS3Async(bucketName, s3Key).join(); + if (csvContent.isEmpty()) { + logger.error("File is empty."); + return; + } + + // Process CSV content. + List records = parseCSV(csvContent); + printTable(records); + + } catch (RuntimeException | IOException | CsvException e) { + logger.error("Error processing CSV file from S3: " + e.getMessage()); + e.printStackTrace(); + } + } + + /** + * Reads a CSV file from S3 and returns it as a String. 
+ */ + private static CompletableFuture<String> readCSVFromS3Async(String bucketName, String s3Key) { + GetObjectRequest getObjectRequest = GetObjectRequest.builder() + .bucket(bucketName) + .key(s3Key) + .build(); + + // Initiating the asynchronous request to get the file as bytes + return getS3AsyncClient().getObject(getObjectRequest, AsyncResponseTransformer.toBytes()) + .thenApply(responseBytes -> responseBytes.asUtf8String()) // Convert bytes to UTF-8 string + .whenComplete((result, exception) -> { + if (exception != null) { + throw new CompletionException("Failed to read CSV from S3: " + exception.getMessage(), exception); + } else { + logger.info("Successfully fetched CSV file content from S3."); + } + }); + } + + /** + * Parses CSV content from a String into a list of records. + */ + private static List<String[]> parseCSV(String csvContent) throws IOException, CsvException { + try (CSVReader csvReader = new CSVReader(new StringReader(csvContent))) { + return csvReader.readAll(); + } + } + + /** + * Prints the given CSV data in a formatted table. + */ + private static void printTable(List<String[]> records) { + if (records.isEmpty()) { + System.out.println("No records found."); + return; + } + + String[] headers = records.get(0); + List<String[]> rows = records.subList(1, records.size()); + + // Determine column widths dynamically based on longest content + int[] columnWidths = new int[headers.length]; + for (int i = 0; i < headers.length; i++) { + final int columnIndex = i; + int maxWidth = Math.max(headers[i].length(), rows.stream() + .map(row -> row.length > columnIndex ? row[columnIndex].length() : 0) + .max(Integer::compareTo) + .orElse(0)); + columnWidths[i] = Math.min(maxWidth, 25); // Limit max width for better readability + } + + // Enable ANSI Console for colored output + AnsiConsole.systemInstall(); + + // Print table header + System.out.println(ansi().fgYellow().a("=== CSV Data from S3 ===").reset()); + printRow(headers, columnWidths, true); + + // Print rows + rows.forEach(row -> printRow(row, columnWidths, false)); + + // Restore console to normal + AnsiConsole.systemUninstall(); + } + + private static void printRow(String[] row, int[] columnWidths, boolean isHeader) { + String border = IntStream.range(0, columnWidths.length) + .mapToObj(i -> "-".repeat(columnWidths[i] + 2)) + .collect(Collectors.joining("+", "+", "+")); + + if (isHeader) { + System.out.println(border); + } + + System.out.print("|"); + for (int i = 0; i < columnWidths.length; i++) { + String cell = (i < row.length && row[i] != null) ? row[i] : ""; + System.out.printf(" %-" + columnWidths[i] + "s |", isHeader ? ansi().fgBrightBlue().a(cell).reset() : cell); + } + System.out.println(); + + if (isHeader) { + System.out.println(border); + } + } +} +// snippet-end:[entityres.java2_actions.main] \ No newline at end of file diff --git a/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResScenario.java b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResScenario.java new file mode 100644 index 00000000000..75f7dfc26f4 --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/java/com/example/entity/scenario/EntityResScenario.java @@ -0,0 +1,492 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
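Note: the action methods above call getResolutionAsyncClient() and getS3AsyncClient(), which are defined in a part of EntityResActions that this hunk does not show. The following is only a minimal sketch of what such accessors could look like, assuming lazily created, shared clients that rely on the SDK's default region and credentials resolution; the class name EntityResClients is illustrative, and the real class may configure an explicit region, credentials provider, or HTTP client.

import software.amazon.awssdk.services.entityresolution.EntityResolutionAsyncClient;
import software.amazon.awssdk.services.s3.S3AsyncClient;

public class EntityResClients {
    private static EntityResolutionAsyncClient resolutionAsyncClient;
    private static S3AsyncClient s3AsyncClient;

    // Build one shared AWS Entity Resolution async client on first use and reuse it afterwards.
    public static synchronized EntityResolutionAsyncClient getResolutionAsyncClient() {
        if (resolutionAsyncClient == null) {
            resolutionAsyncClient = EntityResolutionAsyncClient.builder().build();
        }
        return resolutionAsyncClient;
    }

    // Build one shared S3 async client for the upload and download helpers.
    public static synchronized S3AsyncClient getS3AsyncClient() {
        if (s3AsyncClient == null) {
            s3AsyncClient = S3AsyncClient.builder().build();
        }
        return s3AsyncClient;
    }
}

Reusing a single async client per service keeps connection pools warm across the scenario's calls instead of rebuilding them for every request.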
+// SPDX-License-Identifier: Apache-2.0 + +package com.example.entity.scenario; + +import software.amazon.awssdk.services.cloudformation.model.CloudFormationException; +import software.amazon.awssdk.services.entityresolution.model.ConflictException; +import software.amazon.awssdk.services.entityresolution.model.CreateSchemaMappingResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobResponse; +import software.amazon.awssdk.services.entityresolution.model.GetSchemaMappingResponse; +import software.amazon.awssdk.services.entityresolution.model.JobMetrics; +import software.amazon.awssdk.services.entityresolution.model.ResourceNotFoundException; +import software.amazon.awssdk.services.entityresolution.model.ValidationException; +import software.amazon.awssdk.services.s3.model.S3Exception; +import java.util.Map; +import java.util.Scanner; +import java.util.UUID; +import java.util.concurrent.CompletionException; + +// snippet-start:[entityres.java2_scenario.main] +public class EntityResScenario { + private static final Logger logger = LoggerFactory.getLogger(EntityResScenario.class); + public static final String DASHES = new String(new char[80]).replace("\0", "-"); + private static final String STACK_NAME = "EntityResolutionCdkStack"; + private static final String ENTITY_RESOLUTION_ROLE_ARN_KEY = "EntityResolutionRoleArn"; + private static final String GLUE_DATA_BUCKET_NAME_KEY = "GlueDataBucketName"; + private static final String JSON_GLUE_TABLE_ARN_KEY = "JsonErGlueTableArn"; + private static final String CSV_GLUE_TABLE_ARN_KEY = "CsvErGlueTableArn"; + private static String glueBucketName; + private static String workflowName = "workflow-" + UUID.randomUUID(); + + private static String jsonSchemaMappingName = "jsonschema-" + UUID.randomUUID(); + private static String jsonSchemaMappingArn = null; + private static String csvSchemaMappingName = "csv-" + UUID.randomUUID(); + private static String roleARN; + private static String csvGlueTableArn; + private static String jsonGlueTableArn; + private static Scanner scanner = new Scanner(System.in); + + private static EntityResActions actions = new EntityResActions(); + + public static void main(String[] args) throws InterruptedException { + + logger.info("Welcome to the AWS Entity Resolution Scenario."); + logger.info(""" + AWS Entity Resolution is a fully-managed machine learning service provided by + Amazon Web Services (AWS) that helps organizations extract, link, and + organize information from multiple data sources. It leverages natural + language processing and deep learning models to identify and resolve + entities, such as people, places, organizations, and products, + across structured and unstructured data. + + With Entity Resolution, customers can build robust data integration + pipelines to combine and reconcile data from multiple systems, databases, + and documents. The service can handle ambiguous, incomplete, or conflicting + information, and provide a unified view of entities and their relationships. + This can be particularly valuable in applications such as customer 360, + fraud detection, supply chain management, and knowledge management, where + accurate entity identification is crucial. + + The `EntityResolutionAsyncClient` interface in the AWS SDK for Java 2.x + provides a set of methods to programmatically interact with the AWS Entity + Resolution service. 
This allows developers to automate the entity extraction, + linking, and deduplication process as part of their data processing workflows. + With Entity Resolution, organizations can unlock the value of their data, + improve decision-making, and enhance customer experiences by having a reliable, + comprehensive view of their key entities. + """); + + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info(""" + To prepare the AWS resources needed for this scenario application, the next step uploads + a CloudFormation template whose resulting stack creates the following resources: + - An AWS Glue Data Catalog table + - An AWS IAM role + - An AWS S3 bucket + - An AWS Entity Resolution Schema + + It can take a couple minutes for the Stack to finish creating the resources. + """); + waitForInputToContinue(scanner); + logger.info("Generating resources..."); + CloudFormationHelper.deployCloudFormationStack(STACK_NAME); + Map outputsMap = CloudFormationHelper.getStackOutputsAsync(STACK_NAME).join(); + roleARN = outputsMap.get(ENTITY_RESOLUTION_ROLE_ARN_KEY); + glueBucketName = outputsMap.get(GLUE_DATA_BUCKET_NAME_KEY); + csvGlueTableArn = outputsMap.get(CSV_GLUE_TABLE_ARN_KEY); + jsonGlueTableArn = outputsMap.get(JSON_GLUE_TABLE_ARN_KEY); + logger.info(DASHES); + waitForInputToContinue(scanner); + + try { + runScenario(); + + } catch (Exception ce) { + Throwable cause = ce.getCause(); + logger.error("An exception happened: " + (cause != null ? cause.getMessage() : ce.getMessage())); + } + } + + private static void runScenario() throws InterruptedException { + /* + This JSON is a valid input for the AWS Entity Resolution service. + The JSON represents an array of three objects, each containing an "id", "name", and "email" + property. This format aligns with the expected input structure for the + Entity Resolution service. + */ + String json = """ + {"id":"1","name":"Jane Doe","email":"jane.doe@example.com"} + {"id":"2","name":"John Doe","email":"john.doe@example.com"} + {"id":"3","name":"Jorge Souza","email":"jorge_souza@example.com"} + """; + logger.info("Upload the following JSON objects to the {} S3 bucket.", glueBucketName); + logger.info(json); + String csv = """ + id,name,email,phone + 1,Jane B.,Doe,jane.doe@example.com,555-876-9846 + 2,John Doe Jr.,john.doe@example.com,555-654-3210 + 3,María García,maría_garcia@company.com,555-567-1234 + 4,Mary Major,mary_major@company.com,555-222-3333 + """; + logger.info("Upload the following CSV data to the {} S3 bucket.", glueBucketName); + logger.info(csv); + waitForInputToContinue(scanner); + try { + actions.uploadInputData(glueBucketName, json, csv); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + + if (cause == null) { + logger.error("Failed to upload input data: {}", ce.getMessage(), ce); + } + + if (cause instanceof ResourceNotFoundException) { + logger.error("Failed to upload input data as the resource was not found: {}", cause.getMessage(), cause); + } + return; + } + logger.info("The JSON and CSV objects have been uploaded to the S3 bucket."); + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("1. Create Schema Mapping"); + logger.info(""" + Entity Resolution schema mapping aligns and integrates data from + multiple sources by identifying and matching corresponding entities + like customers or products. 
It unifies schemas, resolves conflicts, + and uses machine learning to link related entities, enabling a + consolidated, accurate view for improved data quality and decision-making. + + In this example, the schema mapping lines up with the fields in the JSON and CSV objects. That is, + it contains these fields: id, name, and email. + """); + try { + CreateSchemaMappingResponse response = actions.createSchemaMappingAsync(jsonSchemaMappingName).join(); + jsonSchemaMappingName = response.schemaName(); + logger.info("The JSON schema mapping name is " + jsonSchemaMappingName); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + + if (cause == null) { + logger.error("Failed to create JSON schema mapping: {}", ce.getMessage(), ce); + } + + if (cause instanceof ConflictException) { + logger.error("Schema mapping conflict detected: {}", cause.getMessage(), cause); + } else { + logger.error("Unexpected error while creating schema mapping: {}", cause.getMessage(), cause); + } + return; + } + + try { + CreateSchemaMappingResponse response = actions.createSchemaMappingAsync(csvSchemaMappingName).join(); + csvSchemaMappingName = response.schemaName(); + logger.info("The CSV schema mapping name is " + csvSchemaMappingName); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + if (cause == null) { + logger.error("Failed to create CSV schema mapping: {}", ce.getMessage(), ce); + } + + if (cause instanceof ConflictException) { + logger.error("Schema mapping conflict detected: {}", cause.getMessage(), cause); + } else { + logger.error("Unexpected error while creating CSV schema mapping: {}", cause.getMessage(), cause); + } + return; + } + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("2. Create an AWS Entity Resolution Workflow. "); + logger.info(""" + An Entity Resolution matching workflow identifies and links records + across datasets that represent the same real-world entity, such as + customers or products. Using techniques like schema mapping, + data profiling, and machine learning algorithms, + it evaluates attributes like names or emails to detect duplicates + or relationships, even with variations or inconsistencies. + The workflow outputs consolidated, de-duplicated data. + + We will use the machine learning-based matching technique. + """); + waitForInputToContinue(scanner); + try { + String workflowArn = actions.createMatchingWorkflowAsync( + roleARN, workflowName, glueBucketName, jsonGlueTableArn, + jsonSchemaMappingName, csvGlueTableArn, csvSchemaMappingName).join(); + + logger.info("The workflow ARN is: " + workflowArn); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + + if (cause == null) { + logger.error("An unexpected error occurred: {}", ce.getMessage(), ce); + } + + if (cause instanceof ValidationException) { + logger.error("Validation error: {}", cause.getMessage(), cause); + } else if (cause instanceof ConflictException) { + logger.error("Workflow conflict detected: {}", cause.getMessage(), cause); + } else { + logger.error("Unexpected error: {}", cause.getMessage(), cause); + } + return; + } + + waitForInputToContinue(scanner); + logger.info(DASHES); + logger.info("3. 
Start the matching job of the " + workflowName + " workflow."); + waitForInputToContinue(scanner); + String jobId = null; + try { + jobId = actions.startMatchingJobAsync(workflowName).join(); + logger.info("The matching job was successfully started."); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + if (cause instanceof ConflictException) { + logger.error("Job conflict detected: {}", cause.getMessage(), cause); + } else { + logger.error("Unexpected error while starting the job: {}", ce.getMessage(), ce); + } + return; + } + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("4. While the matching job is running, let's look at other API methods. First, let's get details for job " + jobId); + waitForInputToContinue(scanner); + try { + actions.getMatchingJobAsync(jobId, workflowName).join(); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + if (cause instanceof ResourceNotFoundException) { + logger.error("The matching job was not found: {}", cause.getMessage(), cause); + } else { + logger.error("Failed to get the matching job details: " + (cause != null ? cause.getMessage() : ce.getMessage())); + } + return; + } + logger.info(DASHES); + + logger.info(DASHES); + logger.info("5. Get the schema mapping for the JSON data."); + waitForInputToContinue(scanner); + try { + GetSchemaMappingResponse response = actions.getSchemaMappingAsync(jsonSchemaMappingName).join(); + jsonSchemaMappingArn = response.schemaArn(); + logger.info("Schema mapping ARN is " + jsonSchemaMappingArn); + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + if (cause instanceof ResourceNotFoundException) { + logger.error("Schema mapping not found: {}", cause.getMessage(), cause); + } else { + logger.error("Error retrieving the specific schema mapping: " + ce.getCause().getMessage()); + } + return; + } + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("6. List Schema Mappings."); + try { + actions.ListSchemaMappings(); + } catch (CompletionException ce) { + logger.error("Error retrieving schema mappings: " + ce.getCause().getMessage()); + return; + } + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("7. Tag the {} resource.", jsonSchemaMappingName); + logger.info(""" + Tags can help you organize and categorize your Entity Resolution resources. + You can also use them to scope user permissions by granting a user permission + to access or change only resources with certain tag values. + In Entity Resolution, SchemaMapping and MatchingWorkflow can be tagged. For this example, + the SchemaMapping is tagged. + """); + try { + actions.tagEntityResource(jsonSchemaMappingArn).join(); + } catch (CompletionException ce) { + logger.error("Error tagging the resource: " + ce.getCause().getMessage()); + return; + } + + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("8. View the results of the AWS Entity Resolution Workflow."); + logger.info(""" + You cannot view the results while the workflow is in a running state. + To view the results, you need to wait for the workflow that we started in step 3 to complete. + + If you choose not to wait, you cannot view the results here. You can perform + this task manually in the AWS Management Console. + + Do you want to wait for the workflow to complete? This can take up to 30 minutes (y/n).
+ """); + String viewAns = scanner.nextLine().trim(); + boolean isComplete = false; + if (viewAns.equalsIgnoreCase("y")) { + logger.info("You selected to view the Entity Resolution Workflow results."); + countdownWithWorkflowCheck(actions, 1800, jobId, workflowName); + isComplete = true; + try { + JobMetrics metrics = actions.getJobInfo(workflowName, jobId).join(); + logger.info("Number of input records: {}", metrics.inputRecords()); + logger.info("Number of match ids: {}", metrics.matchIDs()); + logger.info("Number of records not processed: {}", metrics.recordsNotProcessed()); + logger.info("Number of total records processed: {}", metrics.totalRecordsProcessed()); + logger.info("The following represents the output data generated by the Entity Resolution workflow based on the JSON and CSV input data. The output data is stored in the {} bucket.", glueBucketName); + actions.printData(glueBucketName); + + logger.info(""" + + Note that each of the last 2 records are considered a match even though the 'name' differs between the records; + For example 'John Doe Jr.' compared to 'John Doe'. + The confidence level is a value between 0 and 1, where 1 indicates a perfect match. + + """); + + } catch (CompletionException ce) { + Throwable cause = ce.getCause(); + if (cause instanceof ResourceNotFoundException) { + logger.error("The job not found: {}", cause.getMessage(), cause); + } else { + logger.error("Error retrieving job information: " + ce.getCause().getMessage()); + } + return; + } + } + + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("9. Do you want to delete the resources, including the workflow? (y/n)"); + logger.info(""" + You cannot delete the workflow that is in a running state. + In order to delete the workflow, you need to wait for the workflow to complete. + + You can delete the workflow manually in the AWS Management Console at a later time. + + If you already waited for the workflow to complete in the previous step, + the workflow is completed and you can delete it. + + If the workflow is not completed, this can take up to 30 mins (y/n). + """); + String delAns = scanner.nextLine().trim(); + if (delAns.equalsIgnoreCase("y")) { + try { + if (!isComplete) { + countdownWithWorkflowCheck(actions, 1800, jobId, workflowName); + } + actions.deleteMatchingWorkflowAsync(workflowName).join(); + logger.info("Workflow deleted successfully!"); + } catch (CompletionException ce) { + logger.info("Error deleting the workflow: {} ", ce.getMessage()); + return; + } + + try { + // Delete both schema mappings. + actions.deleteSchemaMappingAsync(jsonSchemaMappingName).join(); + actions.deleteSchemaMappingAsync(csvSchemaMappingName).join(); + logger.info("Both schema mappings were deleted successfully!"); + } catch (CompletionException ce) { + logger.error("Error deleting schema mapping: {}", ce.getMessage()); + return; + } + + waitForInputToContinue(scanner); + logger.info(DASHES); + logger.info(""" + Now we delete the CloudFormation stack, which deletes + the resources that were created at the beginning of this scenario. 
+ """); + waitForInputToContinue(scanner); + logger.info(DASHES); + try { + deleteCloudFormationStack(); + } catch (RuntimeException e) { + logger.error("Failed to delete the stack: {}", e.getMessage()); + return; + } + + } else { + logger.info("You can delete the AWS resources in the AWS Management Console."); + } + + waitForInputToContinue(scanner); + logger.info(DASHES); + + logger.info(DASHES); + logger.info("This concludes the AWS Entity Resolution scenario."); + logger.info(DASHES); + } + + private static void waitForInputToContinue(Scanner scanner) { + while (true) { + logger.info(""); + logger.info("Enter 'c' followed by to continue:"); + String input = scanner.nextLine(); + + if (input.trim().equalsIgnoreCase("c")) { + logger.info("Continuing with the program..."); + logger.info(""); + break; + } else { + // Handle invalid input. + logger.info("Invalid input. Please try again."); + } + } + } + + public static void countdownWithWorkflowCheck(EntityResActions actions, int totalSeconds, String jobId, String workflowName) throws InterruptedException { + int secondsElapsed = 0; + + while (true) { + // Calculate display minutes and seconds. + int remainingTime = totalSeconds - secondsElapsed; + int displayMinutes = remainingTime / 60; + int displaySeconds = remainingTime % 60; + + // Print the countdown. + System.out.printf("\r%02d:%02d", displayMinutes, displaySeconds); + Thread.sleep(1000); // Wait for 1 second + secondsElapsed++; + + // Check workflow status every 60 seconds. + if (secondsElapsed % 60 == 0 || remainingTime <= 0) { + GetMatchingJobResponse response = actions.checkWorkflowStatusCompleteAsync(jobId, workflowName).join(); + if (response != null && "SUCCEEDED".equalsIgnoreCase(String.valueOf(response.status()))) { + logger.info(""); // Move to the next line after countdown. + logger.info("Countdown complete: Workflow is in Completed state!"); + break; // Break out of the loop if the status is "SUCCEEDED" + } + } + + // If countdown reaches zero, reset it for continuous countdown. 
+ if (remainingTime <= 0) { + secondsElapsed = 0; + } + } + } + + private static void deleteCloudFormationStack() { + try { + CloudFormationHelper.emptyS3Bucket(glueBucketName); + CloudFormationHelper.destroyCloudFormationStack(STACK_NAME); + logger.info("Resources deleted successfully!"); + } catch (CloudFormationException e) { + throw new RuntimeException("Failed to delete CloudFormation stack: " + e.getMessage(), e); + } catch (S3Exception e) { + throw new RuntimeException("Failed to empty S3 bucket: " + e.getMessage(), e); + } + } +} +// snippet-end:[entityres.java2_scenario.main] \ No newline at end of file diff --git a/javav2/example_code/entityresolution/src/main/resources/log4j2.xml b/javav2/example_code/entityresolution/src/main/resources/log4j2.xml new file mode 100644 index 00000000000..225afe2b3a8 --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/resources/log4j2.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/javav2/example_code/entityresolution/src/main/resources/template.yaml b/javav2/example_code/entityresolution/src/main/resources/template.yaml new file mode 100644 index 00000000000..f0395929fa7 --- /dev/null +++ b/javav2/example_code/entityresolution/src/main/resources/template.yaml @@ -0,0 +1,263 @@ +Resources: + ErBucket6EA35F9D: + Type: AWS::S3::Bucket + Properties: + BucketName: erbucketf684533d2680435fa99d24b1bdaf5179 + UpdateReplacePolicy: Delete + DeletionPolicy: Delete + Metadata: + aws:cdk:path: EntityResolutionCdkStack/ErBucket/Resource + GlueDatabase: + Type: AWS::Glue::Database + Properties: + CatalogId: + Ref: AWS::AccountId + DatabaseInput: + Name: entity_resolution_db + Metadata: + aws:cdk:path: EntityResolutionCdkStack/GlueDatabase + jsongluetable: + Type: AWS::Glue::Table + Properties: + CatalogId: + Ref: AWS::AccountId + DatabaseName: + Ref: GlueDatabase + TableInput: + Name: jsongluetable + StorageDescriptor: + Columns: + - Name: id + Type: string + - Name: name + Type: string + - Name: email + Type: string + InputFormat: org.apache.hadoop.mapred.TextInputFormat + Location: + Fn::Join: + - "" + - - s3:// + - Ref: ErBucket6EA35F9D + - /jsonData/ + OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + SerdeInfo: + Parameters: + serialization.format: "1" + SerializationLibrary: org.openx.data.jsonserde.JsonSerDe + TableType: EXTERNAL_TABLE + DependsOn: + - GlueDatabase + Metadata: + aws:cdk:path: EntityResolutionCdkStack/jsongluetable + csvgluetable: + Type: AWS::Glue::Table + Properties: + CatalogId: + Ref: AWS::AccountId + DatabaseName: + Ref: GlueDatabase + TableInput: + Name: csvgluetable + StorageDescriptor: + Columns: + - Name: id + Type: string + - Name: name + Type: string + - Name: email + Type: string + - Name: phone + Type: string + InputFormat: org.apache.hadoop.mapred.TextInputFormat + Location: + Fn::Join: + - "" + - - s3:// + - Ref: ErBucket6EA35F9D + - /csvData/ + OutputFormat: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat + SerdeInfo: + Parameters: + serialization.format: "1" + SerializationLibrary: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe + TableType: EXTERNAL_TABLE + DependsOn: + - GlueDatabase + Metadata: + aws:cdk:path: EntityResolutionCdkStack/csvgluetable + EntityResolutionRoleB51A51D3: + Type: AWS::IAM::Role + Properties: + AssumeRolePolicyDocument: + Statement: + - Action: sts:AssumeRole + Effect: Allow + Principal: + Service: entityresolution.amazonaws.com + Version: "2012-10-17" + ManagedPolicyArns: + - Fn::Join: + - "" + 
- - "arn:" + - Ref: AWS::Partition + - :iam::aws:policy/AmazonS3FullAccess + - Fn::Join: + - "" + - - "arn:" + - Ref: AWS::Partition + - :iam::aws:policy/AWSEntityResolutionConsoleFullAccess + - Fn::Join: + - "" + - - "arn:" + - Ref: AWS::Partition + - :iam::aws:policy/AWSGlueConsoleFullAccess + - Fn::Join: + - "" + - - "arn:" + - Ref: AWS::Partition + - :iam::aws:policy/service-role/AWSGlueServiceRole + Metadata: + aws:cdk:path: EntityResolutionCdkStack/EntityResolutionRole/Resource + EntityResolutionRoleDefaultPolicy586C8066: + Type: AWS::IAM::Policy + Properties: + PolicyDocument: + Statement: + - Action: + - entityresolution:GetMatchingWorkflow + - entityresolution:StartMatchingWorkflow + Effect: Allow + Resource: "*" + Version: "2012-10-17" + PolicyName: EntityResolutionRoleDefaultPolicy586C8066 + Roles: + - Ref: EntityResolutionRoleB51A51D3 + Metadata: + aws:cdk:path: EntityResolutionCdkStack/EntityResolutionRole/DefaultPolicy/Resource + CDKMetadata: + Type: AWS::CDK::Metadata + Properties: + Analytics: v2:deflate64:H4sIAAAAAAAA/02MzQ7CIBCEn6V3WPuTvoD15EVTvZstRbOWgimgMYR3t4WLp5n5ZjI1VE0LZYEfy8U4cUUDhItDMbEV3YJtIOy9mKRj3V1nF9lDeQlhBQd0OKCVW3nFQcnICGcIvVGJJT0bReK7xexiZL20xi8ibU7evXy6/6ed0SM5MjqyI75xV1dQQls8LRFfvHY0S+iz/gCPIXoRxAAAAA== + Metadata: + aws:cdk:path: EntityResolutionCdkStack/CDKMetadata/Default + Condition: CDKMetadataAvailable +Outputs: + EntityResolutionRoleArn: + Description: The ARN of the EntityResolution Role + Value: + Fn::GetAtt: + - EntityResolutionRoleB51A51D3 + - Arn + JsonErGlueTableArn: + Description: The ARN of the Json Glue Table + Value: + Fn::Join: + - "" + - - "arn:aws:glue:" + - Ref: AWS::Region + - ":" + - Ref: AWS::AccountId + - :table/ + - Ref: GlueDatabase + - /jsongluetable + CsvErGlueTableArn: + Description: The ARN of the CSV Glue Table + Value: + Fn::Join: + - "" + - - "arn:aws:glue:" + - Ref: AWS::Region + - ":" + - Ref: AWS::AccountId + - :table/ + - Ref: GlueDatabase + - /csvgluetable + GlueDataBucketName: + Description: The name of the Glue Data Bucket + Value: + Ref: ErBucket6EA35F9D +Conditions: + CDKMetadataAvailable: + Fn::Or: + - Fn::Or: + - Fn::Equals: + - Ref: AWS::Region + - af-south-1 + - Fn::Equals: + - Ref: AWS::Region + - ap-east-1 + - Fn::Equals: + - Ref: AWS::Region + - ap-northeast-1 + - Fn::Equals: + - Ref: AWS::Region + - ap-northeast-2 + - Fn::Equals: + - Ref: AWS::Region + - ap-south-1 + - Fn::Equals: + - Ref: AWS::Region + - ap-southeast-1 + - Fn::Equals: + - Ref: AWS::Region + - ap-southeast-2 + - Fn::Equals: + - Ref: AWS::Region + - ca-central-1 + - Fn::Equals: + - Ref: AWS::Region + - cn-north-1 + - Fn::Equals: + - Ref: AWS::Region + - cn-northwest-1 + - Fn::Or: + - Fn::Equals: + - Ref: AWS::Region + - eu-central-1 + - Fn::Equals: + - Ref: AWS::Region + - eu-north-1 + - Fn::Equals: + - Ref: AWS::Region + - eu-south-1 + - Fn::Equals: + - Ref: AWS::Region + - eu-west-1 + - Fn::Equals: + - Ref: AWS::Region + - eu-west-2 + - Fn::Equals: + - Ref: AWS::Region + - eu-west-3 + - Fn::Equals: + - Ref: AWS::Region + - il-central-1 + - Fn::Equals: + - Ref: AWS::Region + - me-central-1 + - Fn::Equals: + - Ref: AWS::Region + - me-south-1 + - Fn::Equals: + - Ref: AWS::Region + - sa-east-1 + - Fn::Or: + - Fn::Equals: + - Ref: AWS::Region + - us-east-1 + - Fn::Equals: + - Ref: AWS::Region + - us-east-2 + - Fn::Equals: + - Ref: AWS::Region + - us-west-1 + - Fn::Equals: + - Ref: AWS::Region + - us-west-2 +Parameters: + BootstrapVersion: + Type: AWS::SSM::Parameter::Value + Default: /cdk-bootstrap/hnb659fds/version + Description: Version of 
the CDK Bootstrap resources in this environment, automatically retrieved from SSM Parameter Store. [cdk:skip] + diff --git a/javav2/example_code/entityresolution/src/test/java/EntityResTests.java b/javav2/example_code/entityresolution/src/test/java/EntityResTests.java new file mode 100644 index 00000000000..03f2c75980d --- /dev/null +++ b/javav2/example_code/entityresolution/src/test/java/EntityResTests.java @@ -0,0 +1,187 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + + +import com.example.entity.scenario.CloudFormationHelper; +import com.example.entity.scenario.EntityResActions; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.MethodOrderer; +import org.junit.jupiter.api.Order; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInstance; +import org.junit.jupiter.api.TestMethodOrder; +import software.amazon.awssdk.services.entityresolution.model.CreateSchemaMappingResponse; + +import java.util.Map; +import java.util.UUID; + +import static org.junit.jupiter.api.Assertions.assertDoesNotThrow; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +@TestInstance(TestInstance.Lifecycle.PER_METHOD) +@TestMethodOrder(MethodOrderer.OrderAnnotation.class) +public class EntityResTests { + private static final Logger logger = LoggerFactory.getLogger(EntityResTests.class); + + private static String roleARN = ""; + + private static String csvMappingARN = ""; + private static String jsonMappingARN = ""; + + private static String jobId = ""; + + private static String glueBucketName = ""; + + private static String csvGlueTableArn = ""; + + private static String jsonGlueTableArn = ""; + + private static final String STACK_NAME = "EntityResolutionCdkStack"; + + private static final String ENTITY_RESOLUTION_ROLE_ARN_KEY = "EntityResolutionRoleArn"; + private static final String GLUE_DATA_BUCKET_NAME_KEY = "GlueDataBucketName"; + private static final String JSON_GLUE_TABLE_ARN_KEY = "JsonErGlueTableArn"; + private static final String CSV_GLUE_TABLE_ARN_KEY = "CsvErGlueTableArn"; + + private static String workflowArn = ""; + private static final String jsonSchemaMappingName = "jsonschema-" + UUID.randomUUID(); + private static final String csvSchemaMappingName = "csv-" + UUID.randomUUID(); + private static final String workflowName = "workflow-"+ UUID.randomUUID(); + private static final EntityResActions actions = new EntityResActions(); + @BeforeAll + public static void setUp() { + CloudFormationHelper.deployCloudFormationStack(STACK_NAME); + Map outputsMap = CloudFormationHelper.getStackOutputsAsync(STACK_NAME).join(); + roleARN = outputsMap.get(ENTITY_RESOLUTION_ROLE_ARN_KEY); + glueBucketName = outputsMap.get(GLUE_DATA_BUCKET_NAME_KEY); + csvGlueTableArn = outputsMap.get(CSV_GLUE_TABLE_ARN_KEY); + jsonGlueTableArn = outputsMap.get(JSON_GLUE_TABLE_ARN_KEY); + + String json = """ + [ + { + "id": "1", + "name": "Alice Johnson", + "email": "alice.johnson@example.com" + }, + { + "id": "2", + "name": "Bob Smith", + "email": "bob.smith@example.com" + }, + { + "id": "3", + "name": "Charlie Black", + "email": "charlie.black@example.com" + } + ] + """; + + String csv = """ + id,name,email,phone + 1,Alice B. Johnson,alice.johnson@example.com,746-876-9846 + 2,Bob Smith Jr.,bob.smith@example.com,987-654-3210 + 3,Charlie Black,charlie.black@company.com,345-567-1234 + 7,Jane E. 
Doe,jane_doe@company.com,111-222-3333 + """; + + actions.uploadInputData(glueBucketName, json, csv); + } + + @Test + @Tag("IntegrationTest") + @Order(1) + public void testCreateMapping() { + assertDoesNotThrow(() -> { + CreateSchemaMappingResponse response = actions.createSchemaMappingAsync(jsonSchemaMappingName).join(); + jsonMappingARN = response.schemaArn(); + assertNotNull(jsonMappingARN); + }); + + assertDoesNotThrow(() -> { + CreateSchemaMappingResponse response = actions.createSchemaMappingAsync(csvSchemaMappingName).join(); + csvMappingARN = response.schemaArn(); + assertNotNull(csvMappingARN); + }); + logger.info("Test 1 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(2) + public void testCreateMappingWorkflow() { + assertDoesNotThrow(() -> { + workflowArn = actions.createMatchingWorkflowAsync(roleARN, workflowName, glueBucketName, jsonGlueTableArn, jsonSchemaMappingName, csvGlueTableArn, csvSchemaMappingName).join(); + assertNotNull(workflowArn); + }); + logger.info("Test 2 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(3) + public void testStartWorkflow() { + assertDoesNotThrow(() -> { + jobId = actions.startMatchingJobAsync(workflowName).join(); + assertNotNull(workflowArn); + }); + logger.info("Test 3 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(4) + public void testGetJobDetails() { + assertDoesNotThrow(() -> { + actions.getMatchingJobAsync(jobId, workflowName).join(); + }); + logger.info("Test 4 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(5) + public void testtSchemaMappingDetails() { + assertDoesNotThrow(() -> { + actions.getSchemaMappingAsync(jsonSchemaMappingName).join(); + }); + logger.info("Test 5 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(6) + public void testListSchemaMappings() { + assertDoesNotThrow(actions::ListSchemaMappings); + logger.info("Test 6 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(7) + public void testLTagResources() { + assertDoesNotThrow(() -> { + actions.tagEntityResource(csvMappingARN).join(); + }); + logger.info("Test 7 passed"); + } + + @Test + @Tag("IntegrationTest") + @Order(8) + public void testLDeleteMapping() { + assertDoesNotThrow(() -> { + logger.info("Wait 30 mins for the workflow to complete"); + Thread.sleep(1800000); + actions.deleteMatchingWorkflowAsync(workflowName).join(); + actions.deleteSchemaMappingAsync(jsonSchemaMappingName).join(); + actions.deleteSchemaMappingAsync(csvSchemaMappingName).join(); + CloudFormationHelper.emptyS3Bucket(glueBucketName); + CloudFormationHelper.destroyCloudFormationStack(STACK_NAME); + }); + logger.info("Test 8 passed"); + } +} diff --git a/javav2/example_code/forecast/src/test/java/ForecastTest.java b/javav2/example_code/forecast/src/test/java/ForecastTest.java index 81e86702d39..af6bac61cce 100644 --- a/javav2/example_code/forecast/src/test/java/ForecastTest.java +++ b/javav2/example_code/forecast/src/test/java/ForecastTest.java @@ -45,29 +45,6 @@ public static void setUp() { predARN = values.getPredARN(); forecastName = values.getForecastName() + randomNum; dataSet = values.getDataSet() + randomNum; - - // Uncomment this code block if you prefer using a config.properties file to - // retrieve AWS values required for these tests. 
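A side note on the EntityResTests class above: test 8 blocks for a fixed 30 minutes with Thread.sleep before deleting resources. A hedged alternative is to poll the job status instead, reusing checkWorkflowStatusCompleteAsync from EntityResActions. This is only a sketch; the helper name, the 60-second interval, the timeout handling, and the assumption that FAILED is the terminal failure status are not part of the example.

import java.time.Duration;
import java.time.Instant;
import com.example.entity.scenario.EntityResActions;
import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobResponse;

final class MatchingJobWaiter {
    // Poll the matching job until it reaches a terminal state or the timeout expires.
    static void waitForJobToFinish(EntityResActions actions, String workflowName, String jobId,
                                   Duration timeout) throws InterruptedException {
        Instant deadline = Instant.now().plus(timeout);
        while (Instant.now().isBefore(deadline)) {
            GetMatchingJobResponse response = actions.checkWorkflowStatusCompleteAsync(jobId, workflowName).join();
            String status = String.valueOf(response.status());
            if ("SUCCEEDED".equalsIgnoreCase(status) || "FAILED".equalsIgnoreCase(status)) {
                return; // Terminal state reached; the caller can clean up resources.
            }
            Thread.sleep(Duration.ofSeconds(60).toMillis()); // Re-check once per minute.
        }
        throw new IllegalStateException("Matching job did not finish within " + timeout);
    }
}

With such a helper, the fixed sleep in test 8 could become MatchingJobWaiter.waitForJobToFinish(actions, workflowName, jobId, Duration.ofMinutes(30)); so the test returns as soon as the job finishes.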
- /* - * - * try (InputStream input = - * ForecastTest.class.getClassLoader().getResourceAsStream("config.properties")) - * { - * Properties prop = new Properties(); - * if (input == null) { - * System.out.println("Sorry, unable to find config.properties"); - * return; - * } - * - * // Populate the data members required for all tests. - * predARN = "arn:aws:forecast:us-west-2:814548047983:predictor/ScottPredictor"; - * forecastName = "forecast"+randomNum; - * dataSet = "dataSet"+randomNum; - * - * } catch (IOException ex) { - * ex.printStackTrace(); - * } - */ } @Test diff --git a/javav2/example_code/iot/README.md b/javav2/example_code/iot/README.md index e77c9b3515a..5e99310cd7d 100644 --- a/javav2/example_code/iot/README.md +++ b/javav2/example_code/iot/README.md @@ -77,8 +77,19 @@ This example shows you how to get started using AWS IoT. #### Learn the basics -This example shows you how to work with AWS IoT device management. - +This example shows you how to do the following: + +- Create an AWS IoT Thing. +- Generate a device certificate. +- Update an AWS IoT Thing with Attributes. +- Return a unique endpoint. +- List your AWS IoT certificates. +- Create an AWS IoT shadow. +- Write out state information. +- Creates a rule. +- List your rules. +- Search things using the Thing name. +- Delete an AWS IoT Thing. @@ -114,4 +125,4 @@ in the `javav2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/example_code/iotsitewise/README.md b/javav2/example_code/iotsitewise/README.md index 907d087a67a..a513b574029 100644 --- a/javav2/example_code/iotsitewise/README.md +++ b/javav2/example_code/iotsitewise/README.md @@ -79,8 +79,17 @@ This example shows you how to get started using AWS IoT SiteWise. #### Learn the basics -This example shows you how to learn core operations for AWS IoT SiteWise using an AWS SDK. - +This example shows you how to do the following: + +- Create an AWS IoT SiteWise Asset Model. +- Create an AWS IoT SiteWise Asset. +- Retrieve the property ID values. +- Send data to an AWS IoT SiteWise Asset. +- Retrieve the value of the AWS IoT SiteWise Asset property. +- Create an AWS IoT SiteWise Portal. +- Create an AWS IoT SiteWise Gateway. +- Describe the AWS IoT SiteWise Gateway. +- Delete the AWS IoT SiteWise Assets. diff --git a/javav2/example_code/iotsitewise/pom.xml b/javav2/example_code/iotsitewise/pom.xml index ce7da2101c6..e98a1f632d7 100644 --- a/javav2/example_code/iotsitewise/pom.xml +++ b/javav2/example_code/iotsitewise/pom.xml @@ -82,6 +82,10 @@ software.amazon.awssdk ssooidc
+ + software.amazon.awssdk + cloudformation + org.apache.logging.log4j log4j-core @@ -91,10 +95,6 @@ slf4j-api 2.0.13 - - software.amazon.awssdk - cloudformation - org.apache.logging.log4j log4j-slf4j2-impl diff --git a/javav2/example_code/redshift/README.md b/javav2/example_code/redshift/README.md index 74823ff8740..08fbc96fca1 100644 --- a/javav2/example_code/redshift/README.md +++ b/javav2/example_code/redshift/README.md @@ -73,8 +73,15 @@ This example shows you how to get started using Amazon Redshift. #### Learn the basics -This example shows you how to learn core operations for Amazon Redshift using an AWS SDK. +This example shows you how to do the following: +- Create a Redshift cluster. +- List databases in the cluster. +- Create a table named Movies. +- Populate the Movies table. +- Query the Movies table by year. +- Modify the Redshift cluster. +- Delete the Amazon Redshift cluster. @@ -110,4 +117,4 @@ in the `javav2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/example_code/s3/src/main/java/com/example/s3/PutBucketLogging.java b/javav2/example_code/s3/src/main/java/com/example/s3/PutBucketLogging.java index 6ae08c33c9f..631ea35563d 100644 --- a/javav2/example_code/s3/src/main/java/com/example/s3/PutBucketLogging.java +++ b/javav2/example_code/s3/src/main/java/com/example/s3/PutBucketLogging.java @@ -33,26 +33,28 @@ public static void main(String[] args) { final String usage = """ Usage: - \s + \s Where: bucketName - The Amazon S3 bucket to upload an object into. - targetBucket - The target bucket . + targetBucket - The target bucket. + accountId - The account id. """; - if (args.length != 2) { + if (args.length != 3) { System.out.println(usage); System.exit(1); } String bucketName = args[0]; String targetBucket = args[1]; + String accountId = args[2]; Region region = Region.US_EAST_1; S3Client s3 = S3Client.builder() .region(region) .build(); - setlogRequest(s3, bucketName, targetBucket); + setlogRequest(s3, bucketName, targetBucket, accountId); s3.close(); } @@ -62,10 +64,11 @@ public static void main(String[] args) { * @param s3 an instance of the {@link S3Client} used to interact with the S3 service * @param bucketName the name of the bucket for which logging needs to be enabled * @param targetBucket the name of the target bucket where the logs will be stored + * @param accountId the account Id * * @throws S3Exception if an error occurs while enabling logging for the bucket */ - public static void setlogRequest(S3Client s3, String bucketName, String targetBucket) { + public static void setlogRequest(S3Client s3, String bucketName, String targetBucket, String accountId) { try { GetBucketAclRequest aclRequest = GetBucketAclRequest.builder() .bucket(targetBucket) @@ -96,7 +99,7 @@ public static void setlogRequest(S3Client s3, String bucketName, String targetBu PutBucketLoggingRequest loggingRequest = PutBucketLoggingRequest.builder() .bucket(bucketName) - .expectedBucketOwner("814548047983") + .expectedBucketOwner(accountId) .bucketLoggingStatus(loggingStatus) .build(); diff --git a/javav2/example_code/sns/src/main/java/com/example/sns/CreateFIFOTopic.java b/javav2/example_code/sns/src/main/java/com/example/sns/CreateFIFOTopic.java index 7e32a2fdb05..c0d2aacd0e7 100644 --- a/javav2/example_code/sns/src/main/java/com/example/sns/CreateFIFOTopic.java +++ b/javav2/example_code/sns/src/main/java/com/example/sns/CreateFIFOTopic.java 
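The PutBucketLogging change above and the SNSWorkflow change further below both replace a hard-coded AWS account ID with a command-line argument. If a caller prefers not to pass an account ID at all, it can be resolved at runtime from the active credentials with AWS STS. A small hedged sketch follows; the class name AccountIdResolver is illustrative and not part of these examples.

import software.amazon.awssdk.services.sts.StsClient;
import software.amazon.awssdk.services.sts.model.GetCallerIdentityRequest;

public class AccountIdResolver {
    // Returns the account ID that owns the credentials the SDK resolves by default.
    public static String currentAccountId() {
        try (StsClient sts = StsClient.create()) {
            return sts.getCallerIdentity(GetCallerIdentityRequest.builder().build()).account();
        }
    }
}

For example, an updated main method could call String accountId = AccountIdResolver.currentAccountId(); instead of reading the value from args, at the cost of one extra STS call at startup.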
@@ -35,8 +35,8 @@ public static void main(String[] args) { System.exit(1); } - String fifoTopicName = "PriceUpdatesTopic3.fifo"; - String fifoQueueARN = "arn:aws:sqs:us-east-1:814548047983:MyPriceSQS.fifo"; + String fifoTopicName = args[0]; + String fifoQueueARN = args[1]; SnsClient snsClient = SnsClient.builder() .region(Region.US_EAST_1) .build(); diff --git a/javav2/example_code/ssm/README.md b/javav2/example_code/ssm/README.md index b27a8dcb3da..a6396b58b0a 100644 --- a/javav2/example_code/ssm/README.md +++ b/javav2/example_code/ssm/README.md @@ -89,8 +89,15 @@ This example shows you how to get started using Systems Manager. #### Learn the basics -This example shows you how to work with Systems Manager maintenance windows, documents, and OpsItems. +This example shows you how to do the following: +- Create a maintenance window. +- Modify the maintenance window schedule. +- Create a document. +- Send a command to a specified EC2 instance. +- Create an OpsItem. +- Update and resolve the OpsItem. +- Delete the maintenance window, OpsItem, and document. @@ -126,4 +133,4 @@ in the `javav2` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/javav2/usecases/topics_and_queues/pom.xml b/javav2/usecases/topics_and_queues/pom.xml index f102ba5b2a8..3bf6ab59b1c 100644 --- a/javav2/usecases/topics_and_queues/pom.xml +++ b/javav2/usecases/topics_and_queues/pom.xml @@ -28,8 +28,8 @@ maven-compiler-plugin 3.1 - 8 - 8 + 15 + 15 @@ -39,7 +39,7 @@ software.amazon.awssdk bom - 2.21.20 + 2.29.45 pom import @@ -87,5 +87,13 @@ gson 2.10.1 + + software.amazon.awssdk + sso + + + software.amazon.awssdk + ssooidc + diff --git a/javav2/usecases/topics_and_queues/src/main/java/com/example/sns/SNSWorkflow.java b/javav2/usecases/topics_and_queues/src/main/java/com/example/sns/SNSWorkflow.java index dd18c3b9809..9ed408659d9 100644 --- a/javav2/usecases/topics_and_queues/src/main/java/com/example/sns/SNSWorkflow.java +++ b/javav2/usecases/topics_and_queues/src/main/java/com/example/sns/SNSWorkflow.java @@ -34,11 +34,13 @@ import software.amazon.awssdk.services.sqs.model.ReceiveMessageRequest; import software.amazon.awssdk.services.sqs.model.SetQueueAttributesRequest; import software.amazon.awssdk.services.sqs.model.SqsException; + import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Scanner; + import com.google.gson.Gson; import com.google.gson.JsonArray; import com.google.gson.JsonObject; @@ -47,13 +49,13 @@ /** * Before running this Java V2 code example, set up your development * environment, including your credentials. - * + *

* For more information, see the following documentation topic: - * + *

* https://docs.aws.amazon.com/sdk-for-java/latest/developer-guide/get-started.html - * + *

* This Java example performs these tasks: - * + *

* 1. Gives the user three options to choose from. * 2. Creates an Amazon Simple Notification Service (Amazon SNS) topic. * 3. Creates an Amazon Simple Queue Service (Amazon SQS) queue. @@ -71,28 +73,28 @@ public class SNSWorkflow { public static void main(String[] args) { final String usage = "\n" + - "Usage:\n" + - " \n\n" + - "Where:\n" + - " accountId - Your AWS account Id value."; + "Usage:\n" + + " \n\n" + + "Where:\n" + + " accountId - Your AWS account Id value."; - // if (args.length != 1) { - // System.out.println(usage); - // System.exit(1); - // } + if (args.length != 1) { + System.out.println(usage); + System.exit(1); + } SnsClient snsClient = SnsClient.builder() - .region(Region.US_EAST_1) - .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) - .build(); + .region(Region.US_EAST_1) + .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) + .build(); SqsClient sqsClient = SqsClient.builder() - .region(Region.US_EAST_1) - .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) - .build(); + .region(Region.US_EAST_1) + .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) + .build(); Scanner in = new Scanner(System.in); - String accountId = "814548047983"; + String accountId = args[0]; String useFIFO; String duplication = "n"; String topicName; @@ -114,28 +116,28 @@ public static void main(String[] args) { System.out.println(DASHES); System.out.println("Welcome to messaging with topics and queues."); System.out.println("In this scenario, you will create an SNS topic and subscribe an SQS queue to the topic.\n" + - "You can select from several options for configuring the topic and the subscriptions for the queue.\n" + - "You can then post to the topic and see the results in the queue."); + "You can select from several options for configuring the topic and the subscriptions for the queue.\n" + + "You can then post to the topic and see the results in the queue."); System.out.println(DASHES); System.out.println(DASHES); System.out.println("SNS topics can be configured as FIFO (First-In-First-Out).\n" + - "FIFO topics deliver messages in order and support deduplication and message filtering.\n" + - "Would you like to work with FIFO topics? (y/n)"); + "FIFO topics deliver messages in order and support deduplication and message filtering.\n" + + "Would you like to work with FIFO topics? 
(y/n)"); useFIFO = in.nextLine(); if (useFIFO.compareTo("y") == 0) { selectFIFO = true; System.out.println("You have selected FIFO"); System.out.println(" Because you have chosen a FIFO topic, deduplication is supported.\n" + - " Deduplication IDs are either set in the message or automatically generated from content using a hash function.\n" - + - " If a message is successfully published to an SNS FIFO topic, any message published and determined to have the same deduplication ID,\n" - + - " within the five-minute deduplication interval, is accepted but not delivered.\n" + - " For more information about deduplication, see https://docs.aws.amazon.com/sns/latest/dg/fifo-message-dedup.html."); + " Deduplication IDs are either set in the message or automatically generated from content using a hash function.\n" + + + " If a message is successfully published to an SNS FIFO topic, any message published and determined to have the same deduplication ID,\n" + + + " within the five-minute deduplication interval, is accepted but not delivered.\n" + + " For more information about deduplication, see https://docs.aws.amazon.com/sns/latest/dg/fifo-message-dedup.html."); System.out.println( - "Would you like to use content-based deduplication instead of entering a deduplication ID? (y/n)"); + "Would you like to use content-based deduplication instead of entering a deduplication ID? (y/n)"); duplication = in.nextLine(); if (duplication.compareTo("y") == 0) { System.out.println("Please enter a group id value"); @@ -191,23 +193,25 @@ public static void main(String[] args) { // Define the policy to use. Make sure that you change the REGION if you are // running this code // in a different region. - String policy = "{\n" + - " \"Statement\": [\n" + - " {\n" + - " \"Effect\": \"Allow\",\n" + - " \"Principal\": {\n" + - " \"Service\": \"sns.amazonaws.com\"\n" + - " },\n" + - " \"Action\": \"sqs:SendMessage\",\n" + - " \"Resource\": \"arn:aws:sqs:us-east-1:" + accountId + ":" + sqsQueueName + "\",\n" + - " \"Condition\": {\n" + - " \"ArnEquals\": {\n" + - " \"aws:SourceArn\": \"arn:aws:sns:us-east-1:" + accountId + ":" + topicName + "\"\n" + - " }\n" + - " }\n" + - " }\n" + - " ]\n" + - " }"; + String policy = """ + { + "Statement": [ + { + "Effect": "Allow", + "Principal": { + "Service": "sns.amazonaws.com" + }, + "Action": "sqs:SendMessage", + "Resource": "arn:aws:sqs:us-east-1:%s:%s", + "Condition": { + "ArnEquals": { + "aws:SourceArn": "arn:aws:sns:us-east-1:%s:%s" + } + } + } + ] + } + """.formatted(accountId, sqsQueueName, accountId, topicName); setQueueAttr(sqsClient, sqsQueueUrl, policy); System.out.println(DASHES); @@ -216,13 +220,13 @@ public static void main(String[] args) { System.out.println("6. Subscribe to the SQS queue."); if (selectFIFO) { System.out.println( - "If you add a filter to this subscription, then only the filtered messages will be received in the queue.\n" - + - "For information about message filtering, see https://docs.aws.amazon.com/sns/latest/dg/sns-message-filtering.html\n" - + - "For this example, you can filter messages by a \"tone\" attribute."); + "If you add a filter to this subscription, then only the filtered messages will be received in the queue.\n" + + + "For information about message filtering, see https://docs.aws.amazon.com/sns/latest/dg/sns-message-filtering.html\n" + + + "For this example, you can filter messages by a \"tone\" attribute."); System.out.println("Would you like to filter messages for " + sqsQueueName + "'s subscription to the topic " - + topicName + "? 
(y/n)"); + + topicName + "? (y/n)"); String filterAns = in.nextLine(); if (filterAns.compareTo("y") == 0) { boolean moreAns = false; @@ -334,8 +338,8 @@ public static void main(String[] args) { public static void deleteSNSTopic(SnsClient snsClient, String topicArn) { try { DeleteTopicRequest request = DeleteTopicRequest.builder() - .topicArn(topicArn) - .build(); + .topicArn(topicArn) + .build(); DeleteTopicResponse result = snsClient.deleteTopic(request); System.out.println("Status was " + result.sdkHttpResponse().statusCode()); @@ -349,13 +353,13 @@ public static void deleteSNSTopic(SnsClient snsClient, String topicArn) { public static void deleteSQSQueue(SqsClient sqsClient, String queueName) { try { GetQueueUrlRequest getQueueRequest = GetQueueUrlRequest.builder() - .queueName(queueName) - .build(); + .queueName(queueName) + .build(); String queueUrl = sqsClient.getQueueUrl(getQueueRequest).queueUrl(); DeleteQueueRequest deleteQueueRequest = DeleteQueueRequest.builder() - .queueUrl(queueUrl) - .build(); + .queueUrl(queueUrl) + .build(); sqsClient.deleteQueue(deleteQueueRequest); System.out.println(queueName + " was successfully deleted."); @@ -369,12 +373,12 @@ public static void deleteSQSQueue(SqsClient sqsClient, String queueName) { public static void unSub(SnsClient snsClient, String subscriptionArn) { try { UnsubscribeRequest request = UnsubscribeRequest.builder() - .subscriptionArn(subscriptionArn) - .build(); + .subscriptionArn(subscriptionArn) + .build(); UnsubscribeResponse result = snsClient.unsubscribe(request); System.out.println("Status was " + result.sdkHttpResponse().statusCode() - + "\nSubscription was removed for " + request.subscriptionArn()); + + "\nSubscription was removed for " + request.subscriptionArn()); } catch (SnsException e) { System.err.println(e.awsErrorDetails().errorMessage()); @@ -388,16 +392,16 @@ public static void deleteMessages(SqsClient sqsClient, String queueUrl, List entries = new ArrayList<>(); for (Message msg : messages) { DeleteMessageBatchRequestEntry entry = DeleteMessageBatchRequestEntry.builder() - .id(msg.messageId()) - .build(); + .id(msg.messageId()) + .build(); entries.add(entry); } DeleteMessageBatchRequest deleteMessageBatchRequest = DeleteMessageBatchRequest.builder() - .queueUrl(queueUrl) - .entries(entries) - .build(); + .queueUrl(queueUrl) + .entries(entries) + .build(); sqsClient.deleteMessageBatch(deleteMessageBatchRequest); System.out.println("The batch delete of messages was successful"); @@ -413,17 +417,17 @@ public static List receiveMessages(SqsClient sqsClient, String queueUrl try { if (msgAttValue.isEmpty()) { ReceiveMessageRequest receiveMessageRequest = ReceiveMessageRequest.builder() - .queueUrl(queueUrl) - .maxNumberOfMessages(5) - .build(); + .queueUrl(queueUrl) + .maxNumberOfMessages(5) + .build(); return sqsClient.receiveMessage(receiveMessageRequest).messages(); } else { // We know there are filters on the message. ReceiveMessageRequest receiveRequest = ReceiveMessageRequest.builder() - .queueUrl(queueUrl) - .messageAttributeNames(msgAttValue) // Include other message attributes if needed. - .maxNumberOfMessages(5) - .build(); + .queueUrl(queueUrl) + .messageAttributeNames(msgAttValue) // Include other message attributes if needed. 
+ .maxNumberOfMessages(5) + .build(); return sqsClient.receiveMessage(receiveRequest).messages(); } @@ -438,13 +442,13 @@ public static List receiveMessages(SqsClient sqsClient, String queueUrl public static void pubMessage(SnsClient snsClient, String message, String topicArn) { try { PublishRequest request = PublishRequest.builder() - .message(message) - .topicArn(topicArn) - .build(); + .message(message) + .topicArn(topicArn) + .build(); PublishResponse result = snsClient.publish(request); System.out - .println(result.messageId() + " Message sent. Status is " + result.sdkHttpResponse().statusCode()); + .println(result.messageId() + " Message sent. Status is " + result.sdkHttpResponse().statusCode()); } catch (SnsException e) { System.err.println(e.awsErrorDetails().errorMessage()); @@ -453,12 +457,12 @@ public static void pubMessage(SnsClient snsClient, String message, String topicA } public static void pubMessageFIFO(SnsClient snsClient, - String message, - String topicArn, - String msgAttValue, - String duplication, - String groupId, - String deduplicationID) { + String message, + String topicArn, + String msgAttValue, + String duplication, + String groupId, + String deduplicationID) { try { PublishRequest request; @@ -466,48 +470,48 @@ public static void pubMessageFIFO(SnsClient snsClient, if (msgAttValue.isEmpty()) { if (duplication.compareTo("y") == 0) { request = PublishRequest.builder() - .message(message) - .messageGroupId(groupId) - .topicArn(topicArn) - .build(); + .message(message) + .messageGroupId(groupId) + .topicArn(topicArn) + .build(); } else { request = PublishRequest.builder() - .message(message) - .messageDeduplicationId(deduplicationID) - .messageGroupId(groupId) - .topicArn(topicArn) - .build(); + .message(message) + .messageDeduplicationId(deduplicationID) + .messageGroupId(groupId) + .topicArn(topicArn) + .build(); } } else { Map messageAttributes = new HashMap<>(); messageAttributes.put(msgAttValue, MessageAttributeValue.builder() - .dataType("String") - .stringValue("true") - .build()); + .dataType("String") + .stringValue("true") + .build()); if (duplication.compareTo("y") == 0) { request = PublishRequest.builder() - .message(message) - .messageGroupId(groupId) - .topicArn(topicArn) - .build(); + .message(message) + .messageGroupId(groupId) + .topicArn(topicArn) + .build(); } else { // Create a publish request with the message and attributes. request = PublishRequest.builder() - .topicArn(topicArn) - .message(message) - .messageDeduplicationId(deduplicationID) - .messageGroupId(groupId) - .messageAttributes(messageAttributes) - .build(); + .topicArn(topicArn) + .message(message) + .messageDeduplicationId(deduplicationID) + .messageGroupId(groupId) + .messageAttributes(messageAttributes) + .build(); } } // Publish the message to the topic. PublishResponse result = snsClient.publish(request); System.out - .println(result.messageId() + " Message sent. Status was " + result.sdkHttpResponse().statusCode()); + .println(result.messageId() + " Message sent. Status was " + result.sdkHttpResponse().statusCode()); } catch (SnsException e) { System.err.println(e.awsErrorDetails().errorMessage()); @@ -522,27 +526,27 @@ public static String subQueue(SnsClient snsClient, String topicArn, String queue if (filterList.isEmpty()) { // No filter subscription is added. 
request = SubscribeRequest.builder() - .protocol("sqs") - .endpoint(queueArn) - .returnSubscriptionArn(true) - .topicArn(topicArn) - .build(); + .protocol("sqs") + .endpoint(queueArn) + .returnSubscriptionArn(true) + .topicArn(topicArn) + .build(); SubscribeResponse result = snsClient.subscribe(request); System.out.println("The queue " + queueArn + " has been subscribed to the topic " + topicArn + "\n" + - "with the subscription ARN " + result.subscriptionArn()); + "with the subscription ARN " + result.subscriptionArn()); return result.subscriptionArn(); } else { request = SubscribeRequest.builder() - .protocol("sqs") - .endpoint(queueArn) - .returnSubscriptionArn(true) - .topicArn(topicArn) - .build(); + .protocol("sqs") + .endpoint(queueArn) + .returnSubscriptionArn(true) + .topicArn(topicArn) + .build(); SubscribeResponse result = snsClient.subscribe(request); System.out.println("The queue " + queueArn + " has been subscribed to the topic " + topicArn + "\n" + - "with the subscription ARN " + result.subscriptionArn()); + "with the subscription ARN " + result.subscriptionArn()); String attributeName = "FilterPolicy"; Gson gson = new Gson(); @@ -556,10 +560,10 @@ public static String subQueue(SnsClient snsClient, String topicArn, String queue String updatedJsonString = gson.toJson(jsonObject); System.out.println(updatedJsonString); SetSubscriptionAttributesRequest attRequest = SetSubscriptionAttributesRequest.builder() - .subscriptionArn(result.subscriptionArn()) - .attributeName(attributeName) - .attributeValue(updatedJsonString) - .build(); + .subscriptionArn(result.subscriptionArn()) + .attributeName(attributeName) + .attributeValue(updatedJsonString) + .build(); snsClient.setSubscriptionAttributes(attRequest); return result.subscriptionArn(); @@ -580,9 +584,9 @@ public static void setQueueAttr(SqsClient sqsClient, String queueUrl, String pol attrMap.put(QueueAttributeName.POLICY, policy); SetQueueAttributesRequest attributesRequest = SetQueueAttributesRequest.builder() - .queueUrl(queueUrl) - .attributes(attrMap) - .build(); + .queueUrl(queueUrl) + .attributes(attrMap) + .build(); sqsClient.setQueueAttributes(attributesRequest); System.out.println("The policy has been successfully attached."); @@ -600,9 +604,9 @@ public static String getSQSQueueAttrs(SqsClient sqsClient, String queueUrl) { atts.add(QueueAttributeName.QUEUE_ARN); GetQueueAttributesRequest attributesRequest = GetQueueAttributesRequest.builder() - .queueUrl(queueUrl) - .attributeNames(atts) - .build(); + .queueUrl(queueUrl) + .attributeNames(atts) + .build(); GetQueueAttributesResponse response = sqsClient.getQueueAttributes(attributesRequest); Map queueAtts = response.attributesAsStrings(); @@ -619,24 +623,24 @@ public static String createQueue(SqsClient sqsClient, String queueName, Boolean Map attrs = new HashMap<>(); attrs.put(QueueAttributeName.FIFO_QUEUE, "true"); CreateQueueRequest createQueueRequest = CreateQueueRequest.builder() - .queueName(queueName) - .attributes(attrs) - .build(); + .queueName(queueName) + .attributes(attrs) + .build(); sqsClient.createQueue(createQueueRequest); System.out.println("\nGet queue url"); GetQueueUrlResponse getQueueUrlResponse = sqsClient - .getQueueUrl(GetQueueUrlRequest.builder().queueName(queueName).build()); + .getQueueUrl(GetQueueUrlRequest.builder().queueName(queueName).build()); return getQueueUrlResponse.queueUrl(); } else { CreateQueueRequest createQueueRequest = CreateQueueRequest.builder() - .queueName(queueName) - .build(); + .queueName(queueName) + .build(); 
sqsClient.createQueue(createQueueRequest); System.out.println("\nGet queue url"); GetQueueUrlResponse getQueueUrlResponse = sqsClient - .getQueueUrl(GetQueueUrlRequest.builder().queueName(queueName).build()); + .getQueueUrl(GetQueueUrlRequest.builder().queueName(queueName).build()); return getQueueUrlResponse.queueUrl(); } @@ -651,8 +655,8 @@ public static String createSNSTopic(SnsClient snsClient, String topicName) { CreateTopicResponse result; try { CreateTopicRequest request = CreateTopicRequest.builder() - .name(topicName) - .build(); + .name(topicName) + .build(); result = snsClient.createTopic(request); return result.topicArn(); @@ -677,9 +681,9 @@ public static String createFIFO(SnsClient snsClient, String topicName, String du } CreateTopicRequest topicRequest = CreateTopicRequest.builder() - .name(topicName) - .attributes(topicAttributes) - .build(); + .name(topicName) + .attributes(topicAttributes) + .build(); CreateTopicResponse response = snsClient.createTopic(topicRequest); return response.topicArn(); diff --git a/javav2/usecases/topics_and_queues/src/test/java/AWSSNSTest.java b/javav2/usecases/topics_and_queues/src/test/java/AWSSNSTest.java index 89c1e934b76..29b928342a3 100644 --- a/javav2/usecases/topics_and_queues/src/test/java/AWSSNSTest.java +++ b/javav2/usecases/topics_and_queues/src/test/java/AWSSNSTest.java @@ -30,7 +30,7 @@ public void TestWorkflowFIFO() throws InterruptedException { .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) .build(); - String accountId = "814548047983"; + String accountId = ""; String duplication = "n"; String topicName; @@ -161,7 +161,7 @@ public void TestWorkflowNonFIFO() throws InterruptedException { .credentialsProvider(EnvironmentVariableCredentialsProvider.create()) .build(); - String accountId = "814548047983"; + String accountId = ""; String useFIFO; String duplication = "n"; String topicName; diff --git a/kotlin/services/apigateway/build.gradle.kts b/kotlin/services/apigateway/build.gradle.kts index 96b473cfbc8..cfb8b0a2359 100644 --- a/kotlin/services/apigateway/build.gradle.kts +++ b/kotlin/services/apigateway/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:apigateway:1.0.30") - implementation("aws.sdk.kotlin:secretsmanager:1.0.30") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:apigateway") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") implementation("com.google.code.gson:gson:2.10") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/apigateway/src/test/kotlin/APIGatewayTest.kt b/kotlin/services/apigateway/src/test/kotlin/APIGatewayTest.kt index f5908bf0235..c3542890d3b 100644 --- a/kotlin/services/apigateway/src/test/kotlin/APIGatewayTest.kt +++ b/kotlin/services/apigateway/src/test/kotlin/APIGatewayTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.apigateway.ApiGatewayClient import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest @@ -19,11 +18,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class APIGatewayTest { + private val logger: Logger = LoggerFactory.getLogger(APIGatewayTest::class.java) lateinit var apiGatewayClient: ApiGatewayClient private var restApiId = "" private var httpMethod = "" @@ -34,7 +36,6 @@ class APIGatewayTest { @BeforeAll fun setup() = runBlocking { apiGatewayClient = ApiGatewayClient { region = "us-east-1" } - // Get values from AWS Secrets Manager. val random = Random() val randomNum = random.nextInt(10000 - 1 + 1) + 1 val gson = Gson() @@ -44,49 +45,34 @@ class APIGatewayTest { httpMethod = values.httpMethod.toString() restApiName = values.restApiName.toString() + randomNum stageName = values.stageName.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. - prop.load(input) - - // Populate the data members required for all tests - restApiId = prop.getProperty("restApiId") - resourceId = prop.getProperty("resourceId") - httpMethod = prop.getProperty("httpMethod") - restApiName = prop.getProperty("restApiName") - stageName = prop.getProperty("stageName") - */ } @Test @Order(1) fun createRestApiTest() = runBlocking { newApiId = createAPI(restApiId).toString() - println("Test 2 passed") + logger.info("Test 1 passed") } @Test @Order(2) fun getDeploymentsTest() = runBlocking { getAllDeployments(newApiId) - println("Test 4 passed") + logger.info("Test 2 passed") } @Test @Order(3) fun getAllStagesTest() = runBlocking { getAllStages(newApiId) - println("Test 5 passed") + logger.info("Test 3 passed") } @Test @Order(4) fun deleteRestApi() = runBlocking { deleteAPI(newApiId) - println("Test 6 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -96,7 +82,6 @@ class APIGatewayTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/apigateway/src/test/resources/logback.xml b/kotlin/services/apigateway/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/apigateway/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/appsync/build.gradle.kts b/kotlin/services/appsync/build.gradle.kts index df71aa24005..dbb9265f0d8 100644 --- a/kotlin/services/appsync/build.gradle.kts +++ b/kotlin/services/appsync/build.gradle.kts @@ -27,16 +27,19 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:appsync:1.2.28") - implementation("aws.sdk.kotlin:sts:1.2.28") - 
implementation("aws.sdk.kotlin:s3:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:appsync") + implementation("aws.sdk.kotlin:sts") + implementation("aws.sdk.kotlin:s3") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") implementation("com.googlecode.json-simple:json-simple:1.1.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/appsync/src/test/kotlin/AppSyncTest.kt b/kotlin/services/appsync/src/test/kotlin/AppSyncTest.kt index a4c4e0344af..ede5e243617 100644 --- a/kotlin/services/appsync/src/test/kotlin/AppSyncTest.kt +++ b/kotlin/services/appsync/src/test/kotlin/AppSyncTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.example.appsync.createDS @@ -21,10 +20,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class AppSyncTest { + private val logger: Logger = LoggerFactory.getLogger(AppSyncTest::class.java) private var apiId = "" private var dsName = "" private var dsRole = "" @@ -42,17 +44,6 @@ class AppSyncTest { dsName = values.dsName.toString() dsRole = values.dsRole.toString() tableName = values.tableName.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. 
- /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - apiId = prop.getProperty("apiId") - dsName = prop.getProperty("dsName") - dsRole = prop.getProperty("dsRole") - tableName = prop.getProperty("tableName") - */ } @Test @@ -61,7 +52,7 @@ class AppSyncTest { runBlocking { keyId = createKey(apiId).toString() assertTrue(!keyId.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -72,7 +63,7 @@ class AppSyncTest { if (dsARN != null) { assertTrue(dsARN.isNotEmpty()) } - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -80,7 +71,7 @@ class AppSyncTest { fun getDataSource() = runBlocking { getDS(apiId, dsName) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -88,7 +79,7 @@ class AppSyncTest { fun listGraphqlApis() = runBlocking { getKeys(apiId) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -96,7 +87,7 @@ class AppSyncTest { fun listApiKeys() = runBlocking { getKeys(apiId) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -104,7 +95,7 @@ class AppSyncTest { fun deleteDataSource() = runBlocking { deleteDS(apiId, dsName) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -112,7 +103,7 @@ class AppSyncTest { fun deleteApiKey() = runBlocking { deleteKey(keyId, apiId) - println("Test 7 passed") + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { @@ -123,7 +114,6 @@ class AppSyncTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/appsync/src/test/resources/logback.xml b/kotlin/services/appsync/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/appsync/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/athena/build.gradle.kts b/kotlin/services/athena/build.gradle.kts index e1fbffb801a..8508a196f57 100644 --- a/kotlin/services/athena/build.gradle.kts +++ b/kotlin/services/athena/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:athena:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:athena") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/athena/src/test/kotlin/AthenaTest.kt b/kotlin/services/athena/src/test/kotlin/AthenaTest.kt index 3ed67ee37e7..3775eea6000 100644 --- a/kotlin/services/athena/src/test/kotlin/AthenaTest.kt +++ 
b/kotlin/services/athena/src/test/kotlin/AthenaTest.kt @@ -2,7 +2,6 @@ // SPDX-License-Identifier: Apache-2.0 // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -23,10 +22,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class AthenaTest { + private val logger: Logger = LoggerFactory.getLogger(AthenaTest::class.java) private var nameQuery: String? = null private var queryString: String? = null private var database: String? = null @@ -43,17 +45,6 @@ class AthenaTest { queryString = values.queryString.toString() database = values.database.toString() outputLocation = values.outputLocation.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - nameQuery = prop.getProperty("nameQuery") - queryString = prop.getProperty("queryString") - database = prop.getProperty("database") - outputLocation = prop.getProperty("outputLocation") - */ } @Test @@ -61,21 +52,21 @@ class AthenaTest { fun createNamedQueryTest() = runBlocking { queryId = createNamedQuery(queryString.toString(), nameQuery.toString(), database.toString()) queryId?.let { assertTrue(it.isNotEmpty()) } - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @Order(2) fun listNamedQueryTest() = runBlocking { listNamedQueries() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @Order(3) fun listQueryExecutionsTest() = runBlocking { listQueryIds() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -84,14 +75,14 @@ class AthenaTest { val queryExecutionId = submitAthenaQuery(queryString.toString(), database.toString(), outputLocation.toString()) waitForQueryToComplete(queryExecutionId) processResultRows(queryExecutionId) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @Order(5) fun deleteNamedQueryTest() = runBlocking { deleteQueryName(queryId) - println("Test 5 passed") + logger.info("Test 5 passed") } private suspend fun getSecretValues(): String { @@ -101,7 +92,6 @@ class AthenaTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/athena/src/test/resources/logback.xml b/kotlin/services/athena/src/test/resources/logback.xml new file mode 100644 index 00000000000..3b326892915 --- /dev/null +++ b/kotlin/services/athena/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %level %logger{36} - %msg%n + + + + + + + diff --git a/kotlin/services/autoscale/build.gradle.kts b/kotlin/services/autoscale/build.gradle.kts index 7a60f9db76e..e5143ef9274 100644 --- a/kotlin/services/autoscale/build.gradle.kts +++ b/kotlin/services/autoscale/build.gradle.kts @@ -27,10 +27,11 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - 
implementation("aws.sdk.kotlin:autoscaling:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:autoscaling") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") diff --git a/kotlin/services/bedrock-runtime/README.md b/kotlin/services/bedrock-runtime/README.md new file mode 100644 index 00000000000..2d5be2d977c --- /dev/null +++ b/kotlin/services/bedrock-runtime/README.md @@ -0,0 +1,77 @@ +# Amazon Bedrock Runtime code examples for the SDK for Kotlin + +## Overview + +Shows how to use the AWS SDK for Kotlin to work with Amazon Bedrock Runtime. + + +This section provides examples that show how to invoke foundation models using the Amazon Bedrock Runtime API with the AWS SDK for Kotlin. + + +_Amazon Bedrock Runtime is a fully managed service that makes it easy to use foundation models from third-party providers and Amazon._ + +## ⚠ Important + +* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/). +* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + + + + +## Code examples + +### Prerequisites + +For prerequisites, see the [README](../../README.md#Prerequisites) in the `kotlin` folder. + + + +> ⚠ You must request access to a model before you can use it. If you try to use the model (with the API or console) before you have requested access to it, you will receive an error message. For more information, see [Model access](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access.html). + +### Amazon Titan Text + +- [InvokeModel](src/main/kotlin/com/example/bedrockruntime/InvokeModel.kt#L6) + + + + + +## Run the examples + +### Instructions + + + + + + + +### Tests + +⚠ Running tests might result in charges to your AWS account. + + +To find instructions for running these tests, see the [README](../../README.md#Tests) +in the `kotlin` folder. + + + + + + +## Additional resources + +- [Amazon Bedrock Runtime User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) +- [Amazon Bedrock Runtime API Reference](https://docs.aws.amazon.com/bedrock/latest/APIReference/welcome.html) +- [SDK for Kotlin Amazon Bedrock Runtime reference](https://sdk.amazonaws.com/kotlin/api/latest/bedrock-runtime/index.html) + + + + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+ +SPDX-License-Identifier: Apache-2.0 diff --git a/kotlin/services/bedrock-runtime/build.gradle.kts b/kotlin/services/bedrock-runtime/build.gradle.kts new file mode 100644 index 00000000000..b51ca6849ff --- /dev/null +++ b/kotlin/services/bedrock-runtime/build.gradle.kts @@ -0,0 +1,54 @@ +plugins { + kotlin("jvm") version "2.1.10" + id("org.jetbrains.kotlin.plugin.serialization") version "2.1.10" + id("org.jlleitschuh.gradle.ktlint") version "11.3.1" apply true + application +} + +group = "com.example.bedrockruntime" +version = "1.0-SNAPSHOT" + +repositories { + mavenCentral() +} + +buildscript { + repositories { + maven("https://plugins.gradle.org/m2/") + } + dependencies { + classpath("org.jlleitschuh.gradle:ktlint-gradle:11.3.1") + } +} + +dependencies { + implementation("aws.sdk.kotlin:bedrockruntime:1.4.11") + implementation("org.jetbrains.kotlinx:kotlinx-serialization-json-jvm:1.8.0") + testImplementation("org.junit.jupiter:junit-jupiter:5.11.4") +} + +application { + mainClass.set("com.example.bedrockruntime.InvokeModelKt") +} + +// Java and Kotlin configuration +kotlin { + jvmToolchain(21) +} + +java { + toolchain { + languageVersion = JavaLanguageVersion.of(21) + } +} + +tasks.test { + useJUnitPlatform() + testLogging { + events("passed", "skipped", "failed") + } + + // Define the test source set + testClassesDirs += files("build/classes/kotlin/test") + classpath += files("build/classes/kotlin/main", "build/resources/main") +} diff --git a/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/InvokeModel.kt b/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/InvokeModel.kt new file mode 100644 index 00000000000..167bccac5b0 --- /dev/null +++ b/kotlin/services/bedrock-runtime/src/main/kotlin/com/example/bedrockruntime/InvokeModel.kt @@ -0,0 +1,67 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.example.bedrockruntime + +// snippet-start:[bedrock-runtime.kotlin.InvokeModel_AmazonTitanText] +import aws.sdk.kotlin.services.bedrockruntime.BedrockRuntimeClient +import aws.sdk.kotlin.services.bedrockruntime.model.InvokeModelRequest +import kotlinx.serialization.Serializable +import kotlinx.serialization.json.Json + +/** + * Before running this Kotlin code example, set up your development environment, including your credentials. + * + * This example demonstrates how to invoke the Titan Text model (amazon.titan-text-lite-v1). + * Remember that you must enable the model before you can use it. See notes in the README.md file. + * + * For more information, see the following documentation topic: + * https://docs.aws.amazon.com/sdk-for-kotlin/latest/developer-guide/setup.html + */ +suspend fun main() { + val prompt = """ + Write a short, funny story about a time-traveling cat who + ends up in ancient Egypt at the time of the pyramids. 
+ """.trimIndent() + + val response = invokeModel(prompt, "amazon.titan-text-lite-v1") + println("Generated story:\n$response") +} + +suspend fun invokeModel(prompt: String, modelId: String): String { + BedrockRuntimeClient { region = "eu-central-1" }.use { client -> + val request = InvokeModelRequest { + this.modelId = modelId + contentType = "application/json" + accept = "application/json" + body = """ + { + "inputText": "${prompt.replace(Regex("\\s+"), " ").trim()}", + "textGenerationConfig": { + "maxTokenCount": 1000, + "stopSequences": [], + "temperature": 1, + "topP": 0.7 + } + } + """.trimIndent().toByteArray() + } + + val response = client.invokeModel(request) + val responseBody = response.body.toString(Charsets.UTF_8) + + val jsonParser = Json { ignoreUnknownKeys = true } + return jsonParser + .decodeFromString(responseBody) + .results + .first() + .outputText + } +} + +@Serializable +private data class BedrockResponse(val results: List) + +@Serializable +private data class Result(val outputText: String) +// snippet-end:[bedrock-runtime.kotlin.InvokeModel_AmazonTitanText] diff --git a/kotlin/services/bedrock-runtime/src/test/kotlin/InvokeModelTest.kt b/kotlin/services/bedrock-runtime/src/test/kotlin/InvokeModelTest.kt new file mode 100644 index 00000000000..21d6eb43eb1 --- /dev/null +++ b/kotlin/services/bedrock-runtime/src/test/kotlin/InvokeModelTest.kt @@ -0,0 +1,24 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +import com.example.bedrockruntime.invokeModel +import kotlinx.coroutines.runBlocking +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.MethodOrderer.OrderAnnotation +import org.junit.jupiter.api.Order +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.TestInstance +import org.junit.jupiter.api.TestMethodOrder + +@TestInstance(TestInstance.Lifecycle.PER_CLASS) +@TestMethodOrder(OrderAnnotation::class) +class InvokeModelTest { + @Test + @Order(1) + fun listFoundationModels() = runBlocking { + val prompt = "What is the capital of France?" 
+ + val answer = invokeModel(prompt, "amazon.titan-text-lite-v1") + assertTrue(answer.isNotBlank()) + } +} diff --git a/kotlin/services/cloudformation/build.gradle.kts b/kotlin/services/cloudformation/build.gradle.kts index 1dc9c01ce78..762c8c88e0f 100644 --- a/kotlin/services/cloudformation/build.gradle.kts +++ b/kotlin/services/cloudformation/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:cloudformation:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:cloudformation") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/cloudformation/src/main/kotlin/com/kotlin/cloudformation/CreateStack.kt b/kotlin/services/cloudformation/src/main/kotlin/com/kotlin/cloudformation/CreateStack.kt index de3e514054b..2d0f25024ec 100644 --- a/kotlin/services/cloudformation/src/main/kotlin/com/kotlin/cloudformation/CreateStack.kt +++ b/kotlin/services/cloudformation/src/main/kotlin/com/kotlin/cloudformation/CreateStack.kt @@ -7,7 +7,6 @@ package com.kotlin.cloudformation import aws.sdk.kotlin.services.cloudformation.CloudFormationClient import aws.sdk.kotlin.services.cloudformation.model.CreateStackRequest import aws.sdk.kotlin.services.cloudformation.model.OnFailure -import aws.sdk.kotlin.services.cloudformation.model.Parameter import kotlin.system.exitProcess // snippet-end:[cf.kotlin.create_stack.import] @@ -32,26 +31,17 @@ suspend fun main(args: Array) { val stackName = args[0] val roleARN = args[1] val location = args[2] - val key = args[3] - val value = args[4] - createCFStack(stackName, roleARN, location, key, value) + createCFStack(stackName, roleARN, location) } // snippet-start:[cf.kotlin.create_stack.main] -suspend fun createCFStack(stackNameVal: String, roleARNVal: String?, location: String?, key: String?, value: String?) { - val myParameter = - Parameter { - parameterKey = key - parameterValue = value - } - +suspend fun createCFStack(stackNameVal: String, roleARNVal: String?, location: String?) { val request = CreateStackRequest { stackName = stackNameVal templateUrl = location roleArn = roleARNVal onFailure = OnFailure.Rollback - parameters = listOf(myParameter) } CloudFormationClient { region = "us-east-1" }.use { cfClient -> diff --git a/kotlin/services/cloudformation/src/test/kotlin/CloudFormationTest.kt b/kotlin/services/cloudformation/src/test/kotlin/CloudFormationTest.kt index c5ae594ffad..27547a7fc60 100644 --- a/kotlin/services/cloudformation/src/test/kotlin/CloudFormationTest.kt +++ b/kotlin/services/cloudformation/src/test/kotlin/CloudFormationTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -18,15 +17,16 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class CloudFormationTest { + private val logger: Logger = LoggerFactory.getLogger(CloudFormationTest::class.java) private var stackName = "" private var roleARN = "" private var location = "" - private var key = "" - private var value = "" @BeforeAll fun setup() = @@ -38,27 +38,14 @@ class CloudFormationTest { stackName = values.stackName.toString() roleARN = values.roleARN.toString() location = values.location.toString() - key = values.key.toString() - value = values.value.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - stackName = prop.getProperty("stackName") - roleARN = prop.getProperty("roleARN") - location = prop.getProperty("location") - key = prop.getProperty("key") - value = prop.getProperty("value") - */ } @Test @Order(1) fun createStackTest() = runBlocking { - createCFStack(stackName, roleARN, location, key, value) - println("Test 1 passed") + createCFStack(stackName, roleARN, location) + logger.info("Test 1 passed") } @Test @@ -66,7 +53,7 @@ class CloudFormationTest { fun describeStacksTest() = runBlocking { describeAllStacks() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -74,7 +61,7 @@ class CloudFormationTest { fun getTemplateTest() = runBlocking { getSpecificTemplate(stackName) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -82,7 +69,7 @@ class CloudFormationTest { fun deleteStackTest() = runBlocking { deleteSpecificTemplate(stackName) - println("Test 4 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -93,7 +80,6 @@ class CloudFormationTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/cloudformation/src/test/resources/logback.xml b/kotlin/services/cloudformation/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/cloudformation/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/cloudtrail/build.gradle.kts b/kotlin/services/cloudtrail/build.gradle.kts index a4dc36dc59e..c2358d65c3f 100644 --- a/kotlin/services/cloudtrail/build.gradle.kts +++ b/kotlin/services/cloudtrail/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:cloudtrail:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + 
implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:cloudtrail") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/cloudtrail/src/test/kotlin/CloudtrailKotlinTest.kt b/kotlin/services/cloudtrail/src/test/kotlin/CloudtrailKotlinTest.kt index aa75acfc5b1..5847d02acf9 100644 --- a/kotlin/services/cloudtrail/src/test/kotlin/CloudtrailKotlinTest.kt +++ b/kotlin/services/cloudtrail/src/test/kotlin/CloudtrailKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -22,10 +21,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class CloudtrailKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(CloudtrailKotlinTest::class.java) private var trailName = "" private var s3BucketName = "" @@ -37,17 +39,6 @@ class CloudtrailKotlinTest { val values: SecretValues = gson.fromJson(json, SecretValues::class.java) trailName = values.trailName.toString() s3BucketName = values.s3BucketName.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. 
- prop.load(input) - trailName = prop.getProperty("trailName") - s3BucketName = prop.getProperty("s3BucketName") - */ } @Test @@ -55,7 +46,7 @@ class CloudtrailKotlinTest { fun createTrail() = runBlocking { createNewTrail(trailName, s3BucketName) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -63,7 +54,7 @@ class CloudtrailKotlinTest { fun putEventSelectors() = runBlocking { setSelector(trailName) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -71,7 +62,7 @@ class CloudtrailKotlinTest { fun getEventSelectors() = runBlocking { getSelectors(trailName) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -79,7 +70,7 @@ class CloudtrailKotlinTest { fun lookupEvents() = runBlocking { lookupAllEvents() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -87,7 +78,7 @@ class CloudtrailKotlinTest { fun describeTrails() = runBlocking { describeSpecificTrails(trailName) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -96,7 +87,7 @@ class CloudtrailKotlinTest { runBlocking { startLog(trailName) stopLog(trailName) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -104,7 +95,7 @@ class CloudtrailKotlinTest { fun deleteTrail() = runBlocking { deleteSpecificTrail(trailName) - println("Test 7 passed") + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { @@ -115,7 +106,6 @@ class CloudtrailKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/cloudtrail/src/test/resources/logback.xml b/kotlin/services/cloudtrail/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/cloudtrail/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/cloudwatch/build.gradle.kts b/kotlin/services/cloudwatch/build.gradle.kts index 8a3484df187..e175829145e 100644 --- a/kotlin/services/cloudwatch/build.gradle.kts +++ b/kotlin/services/cloudwatch/build.gradle.kts @@ -28,17 +28,20 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:cloudwatch:1.2.28") - implementation("aws.sdk.kotlin:cloudwatchevents:1.2.28") - implementation("aws.sdk.kotlin:cloudwatchlogs:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:cloudwatch") + implementation("aws.sdk.kotlin:cloudwatchevents") + implementation("aws.sdk.kotlin:cloudwatchlogs") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") implementation("com.fasterxml.jackson.core:jackson-core:2.14.2") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + 
implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/cloudwatch/src/test/kotlin/CloudWatchTest.kt b/kotlin/services/cloudwatch/src/test/kotlin/CloudWatchTest.kt index ef88efa4a46..08ea9a65c84 100644 --- a/kotlin/services/cloudwatch/src/test/kotlin/CloudWatchTest.kt +++ b/kotlin/services/cloudwatch/src/test/kotlin/CloudWatchTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -27,10 +26,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class CloudWatchTest { + private val logger: Logger = LoggerFactory.getLogger(CloudWatchTest::class.java) private var logGroup = "" private var alarmName = "" private var streamName = "" @@ -79,33 +81,6 @@ class CloudWatchTest { dashboardAddSc = values.dashboardAddSc.toString() settingsSc = values.settingsSc.toString() metricImageSc = values.metricImageSc.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - logGroup = prop.getProperty("logGroup") - alarmName = prop.getProperty("alarmName") - streamName = prop.getProperty("streamName") - ruleResource = prop.getProperty("ruleResource") - metricId = prop.getProperty("metricId") - filterName = prop.getProperty("filterName") - destinationArn = prop.getProperty("destinationArn") - roleArn = prop.getProperty("roleArn") - filterPattern = prop.getProperty("filterPattern") - instanceId = prop.getProperty("instanceId") - ruleName = prop.getProperty("ruleName") - ruleArn = prop.getProperty("ruleArn") - namespace = prop.getProperty("namespace") - myDateSc = prop.getProperty("myDateSc") - costDateWeekSc = prop.getProperty("costDateWeekSc") - dashboardNameSc = prop.getProperty("dashboardNameSc") - dashboardJsonSc = prop.getProperty("dashboardJsonSc") - dashboardAddSc = prop.getProperty("dashboardAddSc") - settingsSc = prop.getProperty("settingsSc") - metricImageSc = prop.getProperty("metricImageSc") - */ } @Test @@ -113,7 +88,7 @@ class CloudWatchTest { fun createAlarmTest() = runBlocking { putAlarm(alarmName, instanceId) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -121,7 +96,7 @@ class CloudWatchTest { fun describeAlarmsTest() = runBlocking { desCWAlarms() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -129,7 +104,7 @@ class CloudWatchTest { fun createSubscriptionFiltersTest() = runBlocking { putSubFilters(filterName, filterPattern, logGroup, destinationArn) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -137,7 +112,7 @@ class CloudWatchTest { fun describeSubscriptionFiltersTest() = runBlocking { describeFilters(logGroup) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -145,7 +120,7 @@ class CloudWatchTest { fun disableAlarmActionsTest() = runBlocking 
{ disableActions(alarmName) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -153,7 +128,7 @@ class CloudWatchTest { fun enableAlarmActionsTest() = runBlocking { enableActions(alarmName) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -161,7 +136,7 @@ class CloudWatchTest { fun getLogEventsTest() = runBlocking { getCWLogEvents(logGroup, streamName) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -169,7 +144,7 @@ class CloudWatchTest { fun putCloudWatchEventTest() = runBlocking { putCWEvents(ruleResource) - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -177,7 +152,7 @@ class CloudWatchTest { fun getMetricDataTest() = runBlocking { getMetData() - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -185,7 +160,7 @@ class CloudWatchTest { fun deleteSubscriptionFilterTest() = runBlocking { deleteSubFilter(filterName, logGroup) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -193,7 +168,7 @@ class CloudWatchTest { fun putRuleTest() = runBlocking { putCWRule(ruleName, ruleArn) - println("Test 11 passed") + logger.info("Test 11 passed") } @Test @@ -201,7 +176,7 @@ class CloudWatchTest { fun putLogEvents() = runBlocking { putCWLogEvents(logGroup, streamName) - println("Test 12 passed") + logger.info("Test 12 passed") } @Test @@ -209,7 +184,7 @@ class CloudWatchTest { fun deleteCWAlarmTest() = runBlocking { deleteCWAlarm(alarmName) - println("Test 13 passed") + logger.info("Test 13 passed") } private suspend fun getSecretValues(): String { @@ -220,7 +195,6 @@ class CloudWatchTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/cloudwatch/src/test/resources/logback.xml b/kotlin/services/cloudwatch/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/cloudwatch/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/codepipeline/build.gradle.kts b/kotlin/services/codepipeline/build.gradle.kts index b52524bfc71..f21e509d4ee 100644 --- a/kotlin/services/codepipeline/build.gradle.kts +++ b/kotlin/services/codepipeline/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:codepipeline:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:codepipeline") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/codepipeline/src/test/kotlin/PipelineServiceTest.kt 
b/kotlin/services/codepipeline/src/test/kotlin/PipelineServiceTest.kt index 98b92df33ff..b62297ac2e2 100644 --- a/kotlin/services/codepipeline/src/test/kotlin/PipelineServiceTest.kt +++ b/kotlin/services/codepipeline/src/test/kotlin/PipelineServiceTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -20,10 +19,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class PipelineServiceTest { + private val logger: Logger = LoggerFactory.getLogger(PipelineServiceTest::class.java) private var name: String = "" private var roleArn: String = "" private var s3Bucket: String = "" @@ -47,7 +49,7 @@ class PipelineServiceTest { fun createPipelineTest() = runBlocking { createNewPipeline(name, roleArn, s3Bucket, s3OutputBucket) - println("\n Test 1 passed") + logger.info("\n Test 1 passed") } @Test @@ -55,7 +57,7 @@ class PipelineServiceTest { fun startPipelineExecutionTest() = runBlocking { executePipeline(name) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -63,7 +65,7 @@ class PipelineServiceTest { fun listPipelinesTest() = runBlocking { getAllPipelines() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -71,7 +73,7 @@ class PipelineServiceTest { fun getPipelineTest() = runBlocking { getSpecificPipeline(name) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -79,7 +81,7 @@ class PipelineServiceTest { fun listPipelineExecutionsTest() = runBlocking { listExecutions(name) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -87,7 +89,7 @@ class PipelineServiceTest { fun deletePipelineTest() = runBlocking { deleteSpecificPipeline(name) - println("Test 6 passed") + logger.info("Test 6 passed") } private suspend fun getSecretValues(): String { @@ -98,7 +100,6 @@ class PipelineServiceTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/codepipeline/src/test/resources/config.properties b/kotlin/services/codepipeline/src/test/resources/config.properties deleted file mode 100644 index 090967de2b0..00000000000 --- a/kotlin/services/codepipeline/src/test/resources/config.properties +++ /dev/null @@ -1,4 +0,0 @@ -roleArn = -name = -s3Bucket = -s3OuputBucket = \ No newline at end of file diff --git a/kotlin/services/codepipeline/src/test/resources/logback.xml b/kotlin/services/codepipeline/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/codepipeline/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/cognito/build.gradle.kts b/kotlin/services/cognito/build.gradle.kts index b57f2aaf83d..61d0a1c133e 100644 --- 
a/kotlin/services/cognito/build.gradle.kts +++ b/kotlin/services/cognito/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:cognitoidentityprovider:1.2.28") - implementation("aws.sdk.kotlin:cognitoidentity:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:cognitoidentityprovider") + implementation("aws.sdk.kotlin:cognitoidentity") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("com.google.code.gson:gson:2.10") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/cognito/src/test/kotlin/CognitoKotlinTest.kt b/kotlin/services/cognito/src/test/kotlin/CognitoKotlinTest.kt index 1b40f078fc1..9c780c5cc13 100644 --- a/kotlin/services/cognito/src/test/kotlin/CognitoKotlinTest.kt +++ b/kotlin/services/cognito/src/test/kotlin/CognitoKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -15,7 +14,6 @@ import com.kotlin.cognito.getAllPools import com.kotlin.cognito.getPools import com.kotlin.cognito.listAllUserPoolClients import com.kotlin.cognito.listPoolIdentities -import com.kotlin.cognito.signUp import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.Assertions import org.junit.jupiter.api.BeforeAll @@ -26,11 +24,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class CognitoKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(CognitoKotlinTest::class.java) private var userPoolName = "" private var identityId = "" private var userPoolId = "" // set in test 2 @@ -78,34 +79,6 @@ class CognitoKotlinTest { userNameMVP = values.userNameMVP.toString() passwordMVP = values.passwordMVP.toString() emailMVP = values.emailMVP.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - /* - // load the properties file. 
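The same test clean-up repeats across the Kotlin suites in this change: the commented-out config.properties fallback is removed, and println calls are replaced with an SLF4J logger that writes through the new logback.xml test resource. Reduced to its essentials (the class name, test name, and service call here are placeholders):

    import kotlinx.coroutines.runBlocking
    import org.junit.jupiter.api.MethodOrderer.OrderAnnotation
    import org.junit.jupiter.api.Order
    import org.junit.jupiter.api.Test
    import org.junit.jupiter.api.TestInstance
    import org.junit.jupiter.api.TestMethodOrder
    import org.slf4j.Logger
    import org.slf4j.LoggerFactory

    @TestInstance(TestInstance.Lifecycle.PER_CLASS)
    @TestMethodOrder(OrderAnnotation::class)
    class ExampleServiceTest {
        private val logger: Logger = LoggerFactory.getLogger(ExampleServiceTest::class.java)

        @Test
        @Order(1)
        fun firstOperationTest() = runBlocking {
            // ... call the service operation under test here ...
            logger.info("Test 1 passed") // replaces the earlier println calls
        }
    }

Routing output through SLF4J keeps test logging consistent with the pattern defined in logback.xml instead of writing directly to standard out.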
- val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - userPoolName = prop.getProperty("userPoolName") - identityId = prop.getProperty("identityId") - username = prop.getProperty("username") - email = prop.getProperty("email") - clientName = prop.getProperty("clientName") - identityPoolName = prop.getProperty("identityPoolName") - appId = prop.getProperty("appId") - existingUserPoolId = prop.getProperty("existingUserPoolId") - existingIdentityPoolId = prop.getProperty("existingIdentityPoolId") - providerName = prop.getProperty("providerName") - existingPoolName = prop.getProperty("existingPoolName") - clientId = prop.getProperty("clientId") - secretkey = prop.getProperty("secretkey") - password = prop.getProperty("password") - poolIdMVP = prop.getProperty("poolIdMVP") - clientIdMVP = prop.getProperty("clientIdMVP") - userNameMVP = prop.getProperty("userNameMVP") - passwordMVP = prop.getProperty("passwordMVP") - emailMVP = prop.getProperty("emailMVP") - - */ } @Test @@ -114,7 +87,7 @@ class CognitoKotlinTest { runBlocking { userPoolId = createPool(userPoolName).toString() Assertions.assertTrue(!userPoolId.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -122,95 +95,86 @@ class CognitoKotlinTest { fun createAdminUserTest() = runBlocking { createNewUser(userPoolId, username, email, password) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @Order(3) - fun signUpUserTest() = - runBlocking { - signUp(clientId, secretkey, username, password, email) - println("Test 3 passed") - } - - @Test - @Order(4) fun listUserPoolsTest() = runBlocking { getAllPools() - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun listUserPoolClientsTest() = runBlocking { listAllUserPoolClients(existingUserPoolId) - println("Test 5 passed") + logger.info("Test 4 passed") } @Test - @Order(6) + @Order(5) fun listUsersTest() = runBlocking { listAllUserPoolClients(existingUserPoolId) - println("Test 6 passed") + logger.info("Test 5 passed") } @Test - @Order(7) + @Order(6) fun describeUserPoolTest() = runBlocking { describePool(existingUserPoolId) - println("Test 7 passed") + logger.info("Test 6 passed") } @Test - @Order(8) + @Order(7) fun deleteUserPool() = runBlocking { delPool(userPoolId) - println("Test 8 passed") + logger.info("Test 7 passed") } @Test - @Order(9) + @Order(8) fun createIdentityPoolTest() = runBlocking { identityPoolId = createIdPool(identityPoolName).toString() Assertions.assertTrue(!identityPoolId.isEmpty()) - println("Test 9 passed") + logger.info("Test 8 passed") } @Test - @Order(10) + @Order(9) fun listIdentityProvidersTest() = runBlocking { getPools() - println("Test 10 passed") + logger.info("Test 9 passed") } @Test - @Order(11) + @Order(10) fun listIdentitiesTest() = runBlocking { listPoolIdentities(identityPoolId) - println("Test 11 passed") + logger.info("Test 10 passed") } @Test - @Order(12) + @Order(11) fun deleteIdentityPool() = runBlocking { deleteIdPool(identityPoolId) - println("Test 12 passed") + logger.info("Test 11 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/cognito" val valueRequest = diff --git a/kotlin/services/cognito/src/test/resources/logback.xml b/kotlin/services/cognito/src/test/resources/logback.xml new file mode 100644 index 
00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/cognito/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/comprehend/build.gradle.kts b/kotlin/services/comprehend/build.gradle.kts index 3b798201a6c..d625af30796 100644 --- a/kotlin/services/comprehend/build.gradle.kts +++ b/kotlin/services/comprehend/build.gradle.kts @@ -29,13 +29,16 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:comprehend:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:comprehend") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/comprehend/src/test/java/ComprehendKotlinTest.kt b/kotlin/services/comprehend/src/test/java/ComprehendKotlinTest.kt index 59722496f81..331996bfe43 100644 --- a/kotlin/services/comprehend/src/test/java/ComprehendKotlinTest.kt +++ b/kotlin/services/comprehend/src/test/java/ComprehendKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -19,10 +18,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class ComprehendKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(ComprehendKotlinTest::class.java) private val text = """ Amazon.com, Inc. is located in Seattle, WA and was founded July 5th, 1994 by Jeff Bezos, allowing customers to buy everything from books to blenders. @@ -43,21 +45,6 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - dataAccessRoleArn = values.dataAccessRoleArn.toString() s3Uri = values.s3Uri.toString() documentClassifierName = values.documentClassifier.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. - prop.load(input) - - // Populate the data members required for all tests. 
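The logback.xml added to each test module is the same 11-line file (blob ba2fdf4e176); only its encoder pattern is visible above. A plausible reconstruction, assuming the standard Logback console-appender layout (the element names and the STDOUT appender name are assumptions, only the pattern string is taken from the file):

    <configuration>
        <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
            <encoder>
                <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
            </encoder>
        </appender>

        <root level="INFO">
            <appender-ref ref="STDOUT"/>
        </root>
    </configuration>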
- dataAccessRoleArn = prop.getProperty("dataAccessRoleArn") - s3Uri = prop.getProperty("s3Uri") - documentClassifierName = prop.getProperty("documentClassifier") - */ } @Test @@ -65,7 +52,7 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - fun detectEntitiesTest() = runBlocking { detectAllEntities(text) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -73,7 +60,7 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - fun detectKeyPhrasesTest() = runBlocking { detectAllKeyPhrases(text) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -81,7 +68,7 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - fun detectLanguageTest() = runBlocking { detectTheDominantLanguage(frText) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -89,7 +76,7 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - fun detectSentimentTest() = runBlocking { detectSentiments(text) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -97,14 +84,13 @@ Seattle is north of Portland and south of Vancouver, BC. Other notable Seattle - fun detectSyntaxTest() = runBlocking { detectAllSyntax(text) - println("Test 5 passed") + logger.info("Test 5 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/comprehend" val valueRequest = diff --git a/kotlin/services/comprehend/src/test/resources/logback.xml b/kotlin/services/comprehend/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/comprehend/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/dynamodb/build.gradle.kts b/kotlin/services/dynamodb/build.gradle.kts index 7afc034c5c3..e96691e6290 100644 --- a/kotlin/services/dynamodb/build.gradle.kts +++ b/kotlin/services/dynamodb/build.gradle.kts @@ -27,10 +27,11 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:dynamodb:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:dynamodb") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation(kotlin("reflect")) @@ -38,6 +39,8 @@ dependencies { implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") implementation("com.google.code.gson:gson:2.10.1") implementation("com.googlecode.json-simple:json-simple:1.1.1") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/dynamodb/src/test/kotlin/DynamoDB.kt b/kotlin/services/dynamodb/src/test/kotlin/DynamoDB.kt index 28ccbb33f2b..edd7794c709 100644 --- a/kotlin/services/dynamodb/src/test/kotlin/DynamoDB.kt +++ 
b/kotlin/services/dynamodb/src/test/kotlin/DynamoDB.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.dynamodb.DynamoDbClient import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest @@ -41,10 +40,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class DynamoDB { + private val logger: Logger = LoggerFactory.getLogger(DynamoDB::class.java) var tableName: String = "" var fileName: String = "" var tableName2: String = "" @@ -76,27 +78,6 @@ class DynamoDB { songTitle = values.songTitleVal.toString() songTitleVal = values.songTitleVal.toString() tableName2 = "Movies" - - /* - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = java.util.Properties() - - // load the properties file. - prop.load(input) - tableName = prop.getProperty("tableName") - tableName2 = prop.getProperty("tableName2") - fileName = prop.getProperty("fileName") - key = prop.getProperty("key") - keyValue = prop.getProperty("keyValue") - albumTitle = prop.getProperty("albumTitle") - albumTitleValue = prop.getProperty("albumTitleValue") - awards = prop.getProperty("awards") - awardVal = prop.getProperty("awardVal") - songTitle = prop.getProperty("songTitle") - songTitleVal = prop.getProperty("songTitleVal") - modAwardVal = prop.getProperty("modAwardVal") - */ } @Test @@ -104,7 +85,7 @@ class DynamoDB { fun createTableTest() = runBlocking { createNewTable(tableName, key) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -112,7 +93,7 @@ class DynamoDB { fun describeTableTest() = runBlocking { describeDymamoDBTable(tableName) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -130,7 +111,7 @@ class DynamoDB { songTitle, songTitleVal, ) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -138,7 +119,7 @@ class DynamoDB { fun listTablesTest() = runBlocking { listAllTables() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -146,7 +127,7 @@ class DynamoDB { fun updateItemTest() = runBlocking { updateTableItem(tableName, key, keyValue, awards, modAwardVal) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -154,7 +135,7 @@ class DynamoDB { fun getItemTest() = runBlocking { getSpecificItem(tableName, key, keyValue) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -162,7 +143,7 @@ class DynamoDB { fun queryTableTest() = runBlocking { queryDynTable(tableName, key, keyValue, "#a") - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -170,7 +151,7 @@ class DynamoDB { fun dynamoDBScanTest() = runBlocking { scanItems(tableName) - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -178,7 +159,7 @@ class DynamoDB { fun deleteItemTest() = runBlocking { com.kotlin.dynamodb.deleteDynamoDBItem(tableName, key, keyValue) - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -186,7 +167,7 @@ 
class DynamoDB { fun deleteTableTest() = runBlocking { deleteDynamoDBTable(tableName) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -198,6 +179,7 @@ class DynamoDB { getMovie(tableName2, "year", "1933") scanMovies(tableName2) deletIssuesTable(tableName2) + logger.info("Test 11 passed") } @Test @@ -213,6 +195,7 @@ class DynamoDB { updateTableItemPartiQL(ddb) queryTablePartiQL(ddb) deleteTablePartiQL(tableNamePartiQ) + logger.info("Test 12 passed") } @Test @@ -227,13 +210,13 @@ class DynamoDB { updateTableItemBatchBatch(ddb) deleteItemsBatch(ddb) deleteTablePartiQLBatch(tableNamePartiQBatch) + logger.info("Test 13 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/dynamodb" val valueRequest = diff --git a/kotlin/services/dynamodb/src/test/resources/logback.xml b/kotlin/services/dynamodb/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/dynamodb/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/ec2/build.gradle.kts b/kotlin/services/ec2/build.gradle.kts index d764ee73b7a..9b7a34b844c 100644 --- a/kotlin/services/ec2/build.gradle.kts +++ b/kotlin/services/ec2/build.gradle.kts @@ -28,14 +28,17 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:ec2:1.2.28") - implementation("aws.sdk.kotlin:ssm:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:ec2") + implementation("aws.sdk.kotlin:ssm") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/ec2/src/test/kotlin/EC2Test.kt b/kotlin/services/ec2/src/test/kotlin/EC2Test.kt index 89b12ce41cd..41ed9e91e22 100644 --- a/kotlin/services/ec2/src/test/kotlin/EC2Test.kt +++ b/kotlin/services/ec2/src/test/kotlin/EC2Test.kt @@ -1,43 +1,23 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson -import com.kotlin.ec2.DASHES -import com.kotlin.ec2.allocateAddressSc -import com.kotlin.ec2.associateAddressSc import com.kotlin.ec2.createEC2Instance import com.kotlin.ec2.createEC2KeyPair import com.kotlin.ec2.createEC2SecurityGroup -import com.kotlin.ec2.createEC2SecurityGroupSc -import com.kotlin.ec2.createKeyPairSc import com.kotlin.ec2.deleteEC2SecGroup -import com.kotlin.ec2.deleteEC2SecGroupSc import com.kotlin.ec2.deleteKeys -import com.kotlin.ec2.deleteKeysSc import com.kotlin.ec2.describeEC2Account import com.kotlin.ec2.describeEC2Address import com.kotlin.ec2.describeEC2Instances -import com.kotlin.ec2.describeEC2InstancesSc import com.kotlin.ec2.describeEC2Keys -import com.kotlin.ec2.describeEC2KeysSc import com.kotlin.ec2.describeEC2RegionsAndZones import com.kotlin.ec2.describeEC2SecurityGroups import com.kotlin.ec2.describeEC2Vpcs -import com.kotlin.ec2.describeImageSc -import com.kotlin.ec2.describeSecurityGroupsSc -import com.kotlin.ec2.disassociateAddressSc import com.kotlin.ec2.findRunningEC2Instances -import com.kotlin.ec2.getInstanceTypesSc -import com.kotlin.ec2.getParaValuesSc -import com.kotlin.ec2.releaseEC2AddressSc -import com.kotlin.ec2.runInstanceSc -import com.kotlin.ec2.startInstanceSc -import com.kotlin.ec2.stopInstanceSc import com.kotlin.ec2.terminateEC2 -import com.kotlin.ec2.terminateEC2Sc import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.BeforeAll @@ -48,12 +28,15 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.io.IOException import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class EC2Test { + private val logger: Logger = LoggerFactory.getLogger(EC2Test::class.java) private var instanceId = "" // Gets set in test 2. private var ami = "" private var instanceName = "" @@ -92,38 +75,6 @@ class EC2Test { groupNameSc = values.groupNameSc.toString() + randomNum vpcIdSc = values.vpcIdSc.toString() myIpAddressSc = values.myIpAddressSc.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - try { - EC2Test::class.java.classLoader.getResourceAsStream("config.properties").use { input -> - val prop = Properties() - if (input == null) { - println("Sorry, unable to find config.properties") - return - } - prop.load(input) - - // Populate the data members required for all tests. 
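The test updates across these modules follow one pattern: a class-level SLF4J Logger replaces the println calls that previously reported progress. A minimal sketch of that pattern, with an illustrative class name and test body that are not taken from any one file:

    import kotlinx.coroutines.runBlocking
    import org.junit.jupiter.api.MethodOrderer.OrderAnnotation
    import org.junit.jupiter.api.Order
    import org.junit.jupiter.api.Test
    import org.junit.jupiter.api.TestInstance
    import org.junit.jupiter.api.TestMethodOrder
    import org.slf4j.Logger
    import org.slf4j.LoggerFactory

    @TestInstance(TestInstance.Lifecycle.PER_CLASS)
    @TestMethodOrder(OrderAnnotation::class)
    class ExampleServiceTest {
        // One logger per test class; the logback.xml (or slf4j-simple) configuration
        // controls the output format instead of raw stdout prints.
        private val logger: Logger = LoggerFactory.getLogger(ExampleServiceTest::class.java)

        @Test
        @Order(1)
        fun exampleOperationTest() =
            runBlocking {
                // Invoke the suspend function under test here, then log the result.
                logger.info("Test 1 passed")
            }
    }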
- ami = prop.getProperty("ami") - instanceName = prop.getProperty("instanceName") - keyName = prop.getProperty("keyName") - groupName = prop.getProperty("groupName") - groupDesc = prop.getProperty("groupDesc") - vpcId = prop.getProperty("vpcId") - - keyNameSc = prop.getProperty("keyNameSc") - fileNameSc = prop.getProperty("fileNameSc") - groupDescSc = prop.getProperty("groupDescSc") - groupNameSc = prop.getProperty("groupNameSc") - vpcIdSc = prop.getProperty("vpcIdSc") - myIpAddressSc = prop.getProperty("myIpAddressSc") - } - } catch (ex: IOException) { - ex.printStackTrace() - } - */ } @Test @@ -132,7 +83,7 @@ class EC2Test { runBlocking { instanceId = createEC2Instance(instanceName, ami).toString() assertTrue(instanceId.isNotEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -140,7 +91,7 @@ class EC2Test { fun createKeyPairTest() = runBlocking { createEC2KeyPair(keyName) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -148,7 +99,7 @@ class EC2Test { fun describeKeyPairTest() = runBlocking { describeEC2Keys() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -156,7 +107,7 @@ class EC2Test { fun deleteKeyPairTest() = runBlocking { deleteKeys(keyName) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -165,7 +116,7 @@ class EC2Test { runBlocking { groupId = createEC2SecurityGroup(groupName, groupDesc, vpcId).toString() assertTrue(groupId.isNotEmpty()) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -173,7 +124,7 @@ class EC2Test { fun describeSecurityGroupTest() = runBlocking { describeEC2SecurityGroups(groupId) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -181,7 +132,7 @@ class EC2Test { fun deleteSecurityGroupTest() = runBlocking { deleteEC2SecGroup(groupId) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -189,7 +140,7 @@ class EC2Test { fun describeAccountTest() = runBlocking { describeEC2Account() - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -197,7 +148,7 @@ class EC2Test { fun describeInstancesTest() = runBlocking { describeEC2Instances() - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -205,7 +156,7 @@ class EC2Test { fun describeRegionsAndZonesTest() = runBlocking { describeEC2RegionsAndZones() - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -213,7 +164,7 @@ class EC2Test { fun describeVPCsTest() = runBlocking { describeEC2Vpcs(vpcId) - println("Test 11 passed") + logger.info("Test 11 passed") } @Test @@ -221,7 +172,7 @@ class EC2Test { fun findRunningInstancesTest() = runBlocking { findRunningEC2Instances() - println("Test 12 passed") + logger.info("Test 12 passed") } @Test @@ -229,7 +180,7 @@ class EC2Test { fun describeAddressesTest() = runBlocking { describeEC2Address() - println("Test 13 passed") + logger.info("Test 13 passed") } @Test @@ -237,125 +188,7 @@ class EC2Test { fun terminateInstanceTest() = runBlocking { terminateEC2(instanceId) - println("Test 14 passed") - } - - @Test - @Order(15) - fun fullEC2ScenarioTest() = - runBlocking { - var newInstanceId: String - println(DASHES) - println("1. Create an RSA key pair and save the private key material as a .pem file.") - createKeyPairSc(keyNameSc, fileNameSc) - println(DASHES) - - println(DASHES) - println("2. List key pairs.") - describeEC2KeysSc() - println(DASHES) - - println(DASHES) - println("3. 
Create a security group.") - val groupId = createEC2SecurityGroupSc(groupNameSc, groupDescSc, vpcIdSc, myIpAddressSc) - groupId?.let { assertTrue(it.isNotEmpty()) } - println(DASHES) - - println(DASHES) - println("4. Display security group info for the newly created security group.") - describeSecurityGroupsSc(groupId.toString()) - println(DASHES) - - println(DASHES) - println("5. Get a list of Amazon Linux 2 AMIs and select one with amzn2 in the name.") - val instanceId = getParaValuesSc() - instanceId?.let { assertTrue(it.isNotEmpty()) } - println("The instance ID is $instanceId") - println(DASHES) - - println(DASHES) - println("6. Get more information about an amzn2 image and return the AMI value.") - val amiValue = instanceId?.let { describeImageSc(it) } - amiValue?.let { assertTrue(it.isNotEmpty()) } - println("The AMI value is $amiValue.") - println(DASHES) - - println(DASHES) - println("7. Get a list of instance types.") - var instanceType = getInstanceTypesSc() - assertTrue(instanceType.isNotEmpty()) - println(DASHES) - - println(DASHES) - println("8. Create an instance.") - instanceType = "m5.large" - println("Wait 1 min before creating the instance using $instanceType") - // TimeUnit.MINUTES.sleep(1) - newInstanceId = runInstanceSc(instanceType, keyNameSc, groupNameSc, amiValue.toString()) - assertTrue(newInstanceId.isNotEmpty()) - println(DASHES) - - println(DASHES) - println("9. Display information about the running instance.") - var ipAddress = describeEC2InstancesSc(newInstanceId) - assertTrue(ipAddress.isNotEmpty()) - println("You can SSH to the instance using this command:") - println("ssh -i " + fileNameSc + "ec2-user@" + ipAddress) - println(DASHES) - - println(DASHES) - println("10. Stop the instance.") - stopInstanceSc(newInstanceId) - println(DASHES) - - println(DASHES) - println("11. Start the instance.") - startInstanceSc(newInstanceId) - ipAddress = describeEC2InstancesSc(newInstanceId) - ipAddress.let { assertTrue(it.isNotEmpty()) } - println("You can SSH to the instance using this command:") - println("ssh -i " + fileNameSc + "ec2-user@" + ipAddress) - println(DASHES) - - println(DASHES) - println("12. Allocate an Elastic IP and associate it with the instance.") - val allocationId = allocateAddressSc() - allocationId?.let { assertTrue(it.isNotEmpty()) } - val associationId = associateAddressSc(newInstanceId, allocationId) - associationId?.let { assertTrue(it.isNotEmpty()) } - println("The associate Id value is $associationId") - println(DASHES) - - println(DASHES) - println("13. Describe the instance again.") - ipAddress = describeEC2InstancesSc(newInstanceId) - ipAddress.let { assertTrue(it.isNotEmpty()) } - println("You can SSH to the instance using this command:") - println("ssh -i " + fileNameSc + "ec2-user@" + ipAddress) - println(DASHES) - - println(DASHES) - println("14. Disassociate and release the Elastic IP address.") - disassociateAddressSc(associationId) - releaseEC2AddressSc(allocationId) - println(DASHES) - - println(DASHES) - println("15. Terminate the instance and use a waiter.") - terminateEC2Sc(newInstanceId) - println(DASHES) - - println(DASHES) - println("16. Delete the security group.") - if (groupId != null) { - deleteEC2SecGroupSc(groupId) - } - println(DASHES) - - println(DASHES) - println("17. 
Delete the key pair.") - deleteKeysSc(keyNameSc) - println(DASHES) + logger.info("Test 14 passed") } private suspend fun getSecretValues(): String { @@ -366,7 +199,6 @@ class EC2Test { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/ec2/src/test/resources/logback.xml b/kotlin/services/ec2/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/ec2/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/ecr/build.gradle.kts b/kotlin/services/ecr/build.gradle.kts index 2373c2b1a25..8f953929622 100644 --- a/kotlin/services/ecr/build.gradle.kts +++ b/kotlin/services/ecr/build.gradle.kts @@ -27,10 +27,11 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:ecr:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:ecr") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") @@ -38,7 +39,8 @@ dependencies { implementation("com.github.docker-java:docker-java-core:3.3.6") implementation("com.github.docker-java:docker-java-transport-httpclient5:3.3.6") implementation("com.github.docker-java:docker-java:3.3.6") - // implementation("ch.qos.logback:logback-classic:1.2.11") // Updated Logback version + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/ecr/src/main/kotlin/com/example/ecr/scenario/ECRScenario.kt b/kotlin/services/ecr/src/main/kotlin/com/example/ecr/scenario/ECRScenario.kt index 5901ea85491..2de5e4985df 100644 --- a/kotlin/services/ecr/src/main/kotlin/com/example/ecr/scenario/ECRScenario.kt +++ b/kotlin/services/ecr/src/main/kotlin/com/example/ecr/scenario/ECRScenario.kt @@ -40,14 +40,14 @@ suspend fun main(args: Array) { """.trimIndent() - // if (args.size != 2) { - // println(usage) - // return - // } + if (args.size != 2) { + println(usage) + return + } - var iamRole = "arn:aws:iam::814548047983:role/Admin" + var iamRole = args[0] var localImageName: String - var accountId = "814548047983" + var accountId = args[1] val ecrActions = ECRActions() val scanner = Scanner(System.`in`) diff --git a/kotlin/services/ecr/src/test/kotlin/ECRTest.kt b/kotlin/services/ecr/src/test/kotlin/ECRTest.kt index 7660f08b155..043b12928af 100644 --- a/kotlin/services/ecr/src/test/kotlin/ECRTest.kt +++ b/kotlin/services/ecr/src/test/kotlin/ECRTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.example.ecr.listImageTags @@ -17,10 +16,13 @@ import org.junit.jupiter.api.Tag import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class ECRTest { + private val logger: Logger = LoggerFactory.getLogger(ECRTest::class.java) private var repoName = "" private var newRepoName = "" private var iamRole = "" @@ -39,35 +41,19 @@ class ECRTest { repoName = values.existingRepo.toString() } - @Test - @Order(1) - fun testScenario() = - runBlocking { - ecrActions?.createECRRepository(newRepoName) - ecrActions?.setRepoPolicy(newRepoName, iamRole) - ecrActions?.getRepoPolicy(newRepoName) - ecrActions?.getRepositoryURI(newRepoName) - ecrActions?.setLifeCyclePolicy(newRepoName) - ecrActions?.pushDockerImage(newRepoName, newRepoName) - ecrActions?.verifyImage(newRepoName, newRepoName) - ecrActions?.deleteECRRepository(newRepoName) - println("Test 1 passed") - } - @Test @Tag("IntegrationTest") - @Order(2) + @Order(1) fun testHello() = runBlocking { listImageTags(repoName) - println("Test 2 passed") + logger.info("Test 1 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/ecr" val valueRequest = diff --git a/kotlin/services/ecr/src/test/resources/logback.xml b/kotlin/services/ecr/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/ecr/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/ecs/build.gradle.kts b/kotlin/services/ecs/build.gradle.kts index 5c84d3828aa..883da7c1ac5 100644 --- a/kotlin/services/ecs/build.gradle.kts +++ b/kotlin/services/ecs/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:ecs:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:ecs") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.google.code.gson:gson:2.10") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/ecs/src/test/kotlin/ESCTest.kt b/kotlin/services/ecs/src/test/kotlin/ESCTest.kt index 5fda8e76bd3..2e22d3a86f4 100644 --- a/kotlin/services/ecs/src/test/kotlin/ESCTest.kt +++ b/kotlin/services/ecs/src/test/kotlin/ESCTest.kt @@ -1,7 +1,6 @@ // Copyright 
Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -21,11 +20,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class ESCTest { + private val logger: Logger = LoggerFactory.getLogger(ESCTest::class.java) var clusterName = "" var clusterARN = "" var securityGroups: String = "" @@ -34,7 +36,6 @@ class ESCTest { var serviceName: String = "" var serviceArn: String = "" var taskDefinition: String = "" - var clusterArn: String = "arn:aws:ecs:us-east-1:814548047983:cluster/ScottCluste11" @BeforeAll fun setup() = @@ -49,21 +50,6 @@ class ESCTest { securityGroups = values.securityGroups.toString() serviceName = values.serviceName.toString() + UUID.randomUUID() taskDefinition = values.taskDefinition.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - // val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - // val prop = Properties() - - // load the properties file. - // prop.load(input) - - // Populate the data members required for all tests - // clusterName = prop.getProperty("clusterName") - // taskId = prop.getProperty("taskId") - // subnet = prop.getProperty("subnet") - // securityGroups = prop.getProperty("securityGroups") - // serviceName = prop.getProperty("serviceName") - // taskDefinition = prop.getProperty("taskDefinition") } @Test @@ -71,15 +57,15 @@ class ESCTest { fun createClusterTest() = runBlocking { clusterARN = createGivenCluster(clusterName).toString() - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @Order(2) fun createServiceTest() = runBlocking { - serviceArn = createNewService(clusterArn, serviceName, securityGroups, subnet, taskDefinition).toString() - println("Test 2 passed") + serviceArn = createNewService(clusterARN, serviceName, securityGroups, subnet, taskDefinition).toString() + logger.info("Test 2 passed") } @Test @@ -87,46 +73,45 @@ class ESCTest { fun listClustersTest() = runBlocking { listAllClusters() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @Order(4) fun describeClustersTest() = runBlocking { - descCluster(clusterArn) - println("Test 4 passed") + descCluster(clusterARN) + logger.info("Test 4 passed") } @Test @Order(5) fun listTaskDefinitionsTest() = runBlocking { - getAllTasks(clusterArn, taskId) - println("Test 5 passed") + getAllTasks(clusterARN, taskId) + logger.info("Test 5 passed") } @Test @Order(6) fun updateServiceTest() = runBlocking { - updateSpecificService(clusterArn, serviceArn) - println("Test 6 passed") + updateSpecificService(clusterARN, serviceArn) + logger.info("Test 6 passed") } @Test @Order(7) fun deleteServiceTest() = runBlocking { - deleteSpecificService(clusterArn, serviceArn) - println("Test 7 passed") + deleteSpecificService(clusterARN, serviceArn) + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - 
credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/ecs" val valueRequest = diff --git a/kotlin/services/ecs/src/test/resources/logback.xml b/kotlin/services/ecs/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/ecs/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/elasticbeanstalk/build.gradle.kts b/kotlin/services/elasticbeanstalk/build.gradle.kts index 3ebf39a5304..f9a4dfad8e6 100644 --- a/kotlin/services/elasticbeanstalk/build.gradle.kts +++ b/kotlin/services/elasticbeanstalk/build.gradle.kts @@ -28,12 +28,15 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:elasticbeanstalk:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:elasticbeanstalk") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/elasticbeanstalk/src/test/kotlin/ElasticBeanstalkTest.kt b/kotlin/services/elasticbeanstalk/src/test/kotlin/ElasticBeanstalkTest.kt index 4d2628b90b5..2359788ffbe 100644 --- a/kotlin/services/elasticbeanstalk/src/test/kotlin/ElasticBeanstalkTest.kt +++ b/kotlin/services/elasticbeanstalk/src/test/kotlin/ElasticBeanstalkTest.kt @@ -16,6 +16,8 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.io.IOException import java.net.URISyntaxException import java.util.Random @@ -24,6 +26,7 @@ import java.util.concurrent.TimeUnit @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class ElasticBeanstalkTest { + private val logger: Logger = LoggerFactory.getLogger(ElasticBeanstalkTest::class.java) var appName: String = "TestApp" var envName: String = "TestEnv" var appArn: String = "" @@ -42,7 +45,7 @@ class ElasticBeanstalkTest { @Order(1) fun whenInitializingAWSService_thenNotNull() { Assertions.assertNotNull(appName) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -51,7 +54,7 @@ class ElasticBeanstalkTest { runBlocking { appArn = createApp(appName) assertTrue(!appArn.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -60,7 +63,7 @@ class ElasticBeanstalkTest { runBlocking { envArn = createEBEnvironment(envName, appName) assertTrue(!envArn.isEmpty()) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -68,7 +71,7 @@ class ElasticBeanstalkTest { fun describeApplications() = runBlocking { describeApps() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -76,7 +79,7 @@ class ElasticBeanstalkTest { fun describeEnvironment() = runBlocking { describeEnv(appName) - 
println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -84,7 +87,7 @@ class ElasticBeanstalkTest { fun describeOptions() = runBlocking { getOptions(envName) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -94,6 +97,6 @@ class ElasticBeanstalkTest { println("*** Wait for 5 MIN so the app can be deleted") TimeUnit.MINUTES.sleep(5) deleteApp(appName) - println("Test 7 passed") + logger.info("Test 7 passed") } } diff --git a/kotlin/services/elasticbeanstalk/src/test/resources/logback.xml b/kotlin/services/elasticbeanstalk/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/elasticbeanstalk/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/emr/build.gradle.kts b/kotlin/services/emr/build.gradle.kts index f15411d7126..a9e812ccb11 100644 --- a/kotlin/services/emr/build.gradle.kts +++ b/kotlin/services/emr/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:emr:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:emr") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/emr/src/test/kotlin/EMRTest.kt b/kotlin/services/emr/src/test/kotlin/EMRTest.kt index 8b08ec3c800..8de73b94eb3 100644 --- a/kotlin/services/emr/src/test/kotlin/EMRTest.kt +++ b/kotlin/services/emr/src/test/kotlin/EMRTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -15,17 +14,19 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.io.IOException @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class EMRTest { + private val logger: Logger = LoggerFactory.getLogger(EMRTest::class.java) private var jar = "" private var myClass = "" private var keys = "" private var logUri = "" private var name = "" - private var jobFlowId = "" private var existingClusterId = "" @BeforeAll @@ -42,31 +43,6 @@ class EMRTest { logUri = values.logUri.toString() name = values.name.toString() existingClusterId = values.existingClusterId.toString() - - /* - try { - EMRTest::class.java.classLoader.getResourceAsStream("config.properties").use { input -> - val prop = Properties() - if (input == null) { - println("Sorry, unable to find config.properties") - return - } - - // load a properties file from class path, inside static method - prop.load(input) - - // Populate the data members required for all tests - jar = prop.getProperty("jar") - myClass = prop.getProperty("myClass") - keys = prop.getProperty("keys") - logUri = prop.getProperty("logUri") - name = prop.getProperty("name") - existingClusterId = prop.getProperty("existingClusterId") - } - } catch (ex: IOException) { - ex.printStackTrace() - } - */ } @Test @@ -74,7 +50,7 @@ class EMRTest { fun listClustersTest() = runBlocking { listAllClusters() - println("Test 3 passed") + logger.info("Test 1 passed") } private suspend fun getSecretValues(): String { @@ -85,7 +61,6 @@ class EMRTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/emr/src/test/resources/logback.xml b/kotlin/services/emr/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/emr/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/eventbridge/build.gradle.kts b/kotlin/services/eventbridge/build.gradle.kts index b0ddbf2683a..b5a4f8712c4 100644 --- a/kotlin/services/eventbridge/build.gradle.kts +++ b/kotlin/services/eventbridge/build.gradle.kts @@ -28,14 +28,17 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:eventbridge:1.2.28") - implementation("aws.sdk.kotlin:iam:1.2.28") - implementation("aws.sdk.kotlin:sns:1.2.28") - implementation("aws.sdk.kotlin:s3:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:eventbridge") + implementation("aws.sdk.kotlin:iam") + implementation("aws.sdk.kotlin:sns") + implementation("aws.sdk.kotlin:s3") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + 
implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/eventbridge/src/test/kotlin/EventBridgeKotlinTest.kt b/kotlin/services/eventbridge/src/test/kotlin/EventBridgeKotlinTest.kt index 5963c275848..22d2656a729 100644 --- a/kotlin/services/eventbridge/src/test/kotlin/EventBridgeKotlinTest.kt +++ b/kotlin/services/eventbridge/src/test/kotlin/EventBridgeKotlinTest.kt @@ -12,10 +12,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class EventBridgeKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(EventBridgeKotlinTest::class.java) private var roleNameSc = "" private var bucketNameSc = "" private var topicNameSc = "" @@ -28,6 +31,6 @@ class EventBridgeKotlinTest { fun helloEventBridgeTest() = runBlocking { listBusesHello() - println("Test 1 passed") + logger.info("Test 1 passed") } } diff --git a/kotlin/services/eventbridge/src/test/resources/logback.xml b/kotlin/services/eventbridge/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/eventbridge/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/firehose/build.gradle.kts b/kotlin/services/firehose/build.gradle.kts index 937d8ec3a90..74a5cd33eed 100644 --- a/kotlin/services/firehose/build.gradle.kts +++ b/kotlin/services/firehose/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:firehose:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:firehose") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") implementation("com.google.code.gson:gson:2.10") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/firehose/src/test/kotlin/FirehoseTest.kt b/kotlin/services/firehose/src/test/kotlin/FirehoseTest.kt index f02ba3e9864..d5b729794c2 100644 --- a/kotlin/services/firehose/src/test/kotlin/FirehoseTest.kt +++ b/kotlin/services/firehose/src/test/kotlin/FirehoseTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -19,12 +18,15 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID import java.util.concurrent.TimeUnit @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class FirehoseTest { + private val logger: Logger = LoggerFactory.getLogger(FirehoseTest::class.java) private var bucketARN = "" private var roleARN = "" private var newStream = "" @@ -42,63 +44,49 @@ class FirehoseTest { roleARN = values.roleARN.toString() newStream = values.newStream.toString() + UUID.randomUUID() textValue = values.textValue.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - bucketARN = prop.getProperty("bucketARN") - roleARN = prop.getProperty("roleARN") - newStream = prop.getProperty("newStream") - textValue = prop.getProperty("textValue") - existingStream = prop.getProperty("existingStream") - delStream = prop.getProperty("delStream") - */ } @Test - @Order(2) + @Order(1) fun createDeliveryStreamTest() = runBlocking { createStream(bucketARN, roleARN, newStream) - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(3) + @Order(2) fun putRecordsTest() = runBlocking { // Wait for the resource to become available println("Wait 15 mins for resource to become available.") TimeUnit.MINUTES.sleep(15) putSingleRecord(textValue, newStream) - println("Test 3 passed") + logger.info("Test 2 passed") } @Test - @Order(4) + @Order(3) fun putBatchRecordsTest() = runBlocking { addStockTradeData(newStream) - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun listDeliveryStreamsTest() = runBlocking { listStreams() - println("Test 5 passed") + logger.info("Test 4 passed") } @Test - @Order(6) + @Order(5) fun deleteStreamTest() = runBlocking { delStream(newStream) - println("Test 6 passed") + logger.info("Test 5 passed") } private suspend fun getSecretValues(): String { @@ -109,7 +97,6 @@ class FirehoseTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/firehose/src/test/resources/logback.xml b/kotlin/services/firehose/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/firehose/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/forecast/build.gradle.kts b/kotlin/services/forecast/build.gradle.kts index 6f962cb5c73..ac766827fb1 100644 --- a/kotlin/services/forecast/build.gradle.kts +++ b/kotlin/services/forecast/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") 
dependencies { - implementation("aws.sdk.kotlin:forecast:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:forecast") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/forecast/src/test/kotlin/ForecastKotlinTest.kt b/kotlin/services/forecast/src/test/kotlin/ForecastKotlinTest.kt index 9c6cd44baf1..5444bb23702 100644 --- a/kotlin/services/forecast/src/test/kotlin/ForecastKotlinTest.kt +++ b/kotlin/services/forecast/src/test/kotlin/ForecastKotlinTest.kt @@ -1,6 +1,5 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -22,12 +21,15 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random import java.util.concurrent.TimeUnit @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class ForecastKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(ForecastKotlinTest::class.java) private var predictorARN = "" private var forecastArn = "" // set in test 3 private var forecastName = "" @@ -46,17 +48,6 @@ class ForecastKotlinTest { predictorARN = values.predARN.toString() forecastName = values.forecastName.toString() + randomNum dataSetName = values.dataSet.toString() + randomNum - - /* - // load the properties file. 
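With EnvironmentCredentialsProvider removed, each getSecretValues helper now relies on the SDK's default credential provider chain (environment variables, shared config and credentials files, and so on). A minimal sketch of the resulting helper, mirroring the shape used throughout these tests; the parameterized signature shown here is illustrative:

    import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient
    import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest

    suspend fun getSecretValues(secretName: String): String {
        val valueRequest =
            GetSecretValueRequest {
                secretId = secretName
            }
        // No credentialsProvider is configured; the default chain resolves credentials.
        SecretsManagerClient { region = "us-east-1" }.use { secretClient ->
            val valueResponse = secretClient.getSecretValue(valueRequest)
            return valueResponse.secretString.toString()
        }
    }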
- val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - forecastName = prop.getProperty("forecastName") - dataSetName = prop.getProperty("dataSetName") - predictorARN = prop.getProperty("predictorARN") - existingforecastDelete = prop.getProperty("existingforecastDelete") - */ } @Test @@ -65,7 +56,7 @@ class ForecastKotlinTest { runBlocking { myDataSetARN = createForecastDataSet(dataSetName).toString() assertTrue(!myDataSetARN.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -74,7 +65,7 @@ class ForecastKotlinTest { runBlocking { forecastArn = createNewForecast(forecastName, predictorARN).toString() assertTrue(!forecastArn.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -82,7 +73,7 @@ class ForecastKotlinTest { fun listDataSets() = runBlocking { listForecastDataSets() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -90,7 +81,7 @@ class ForecastKotlinTest { fun listDataSetGroups() = runBlocking { listDataGroups() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -98,7 +89,7 @@ class ForecastKotlinTest { fun listForecasts() = runBlocking { listAllForeCasts() - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -106,7 +97,7 @@ class ForecastKotlinTest { fun describeForecast() = runBlocking { describe(forecastArn) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -114,7 +105,7 @@ class ForecastKotlinTest { fun deleteDataSet() = runBlocking { deleteForecastDataSet(myDataSetARN) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -124,7 +115,7 @@ class ForecastKotlinTest { println("Wait 40 mins for resource to become available.") TimeUnit.MINUTES.sleep(40) delForecast(forecastArn) - println("Test 8 passed") + logger.info("Test 8 passed") } private suspend fun getSecretValues(): String { @@ -135,7 +126,6 @@ class ForecastKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/forecast/src/test/resources/logback.xml b/kotlin/services/forecast/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/forecast/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/glue/build.gradle.kts b/kotlin/services/glue/build.gradle.kts index 792fc5eb9fe..45872e73ad0 100644 --- a/kotlin/services/glue/build.gradle.kts +++ b/kotlin/services/glue/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:glue:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:glue") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") 
testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/glue/src/main/kotlin/com/kotlin/glue/SearchTables.kt b/kotlin/services/glue/src/main/kotlin/com/kotlin/glue/SearchTables.kt index c06167c8c4f..a3190933dc2 100644 --- a/kotlin/services/glue/src/main/kotlin/com/kotlin/glue/SearchTables.kt +++ b/kotlin/services/glue/src/main/kotlin/com/kotlin/glue/SearchTables.kt @@ -40,7 +40,7 @@ suspend fun searchGlueTable(text: String?) { val request = SearchTablesRequest { searchText = text - resourceShareType = ResourceShareType.fromValue("All") + resourceShareType = ResourceShareType.All maxResults = 10 } diff --git a/kotlin/services/glue/src/test/kotlin/GlueTest.kt b/kotlin/services/glue/src/test/kotlin/GlueTest.kt index 7ecc579e5f1..e23b1ea69ef 100644 --- a/kotlin/services/glue/src/test/kotlin/GlueTest.kt +++ b/kotlin/services/glue/src/test/kotlin/GlueTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -18,11 +17,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class GlueTest { + private val logger: Logger = LoggerFactory.getLogger(GlueTest::class.java) private var cron = "" private var iam = "" private var tableName = "" @@ -53,55 +55,38 @@ class GlueTest { crawlerNameSc = values.crawlerNameSc.toString() + UUID.randomUUID() scriptLocationSc = values.scriptLocationSc.toString() locationUri = values.locationUri.toString() - - // Uncomment the block below if using config.properties file - /* - val input = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - cron = prop.getProperty("cron") - iam = prop.getProperty("IAM") - tableName = prop.getProperty("tableName") - text = prop.getProperty("text") - jobNameSc = prop.getProperty("jobNameSc") - s3PathSc = prop.getProperty("s3PathSc") - dbNameSc = prop.getProperty("dbNameSc") - crawlerNameSc = prop.getProperty("crawlerNameSc") - scriptLocationSc = prop.getProperty("scriptLocationSc") - locationUri = prop.getProperty("locationUri") - */ } @Test - @Order(2) + @Order(1) fun getCrawlersTest() = runBlocking { getAllCrawlers() - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(4) + @Order(2) fun getDatabasesTest() = runBlocking { getAllDatabases() - println("Test 4 passed") + logger.info("Test 2 passed") } @Test - @Order(5) + @Order(3) fun searchTablesTest() = runBlocking { searchGlueTable(text) - println("Test 5 passed") + logger.info("Test 3 passed") } @Test - @Order(6) + @Order(4) fun listWorkflowsTest() = runBlocking { listAllWorkflows() - println("Test 6 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -112,7 +97,6 @@ class GlueTest { } SecretsManagerClient { region = 
"us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/glue/src/test/resources/logback.xml b/kotlin/services/glue/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/glue/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/iam/build.gradle.kts b/kotlin/services/iam/build.gradle.kts index 8febd761f60..44a71ed4570 100644 --- a/kotlin/services/iam/build.gradle.kts +++ b/kotlin/services/iam/build.gradle.kts @@ -27,16 +27,19 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:iam:1.2.28") - implementation("aws.sdk.kotlin:sts:1.2.28") - implementation("aws.sdk.kotlin:s3:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:iam") + implementation("aws.sdk.kotlin:sts") + implementation("aws.sdk.kotlin:s3") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") implementation("com.googlecode.json-simple:json-simple:1.1.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/iam/src/test/kotlin/IAMTest.kt b/kotlin/services/iam/src/test/kotlin/IAMTest.kt index 0be47acf2b2..3902813a8f1 100644 --- a/kotlin/services/iam/src/test/kotlin/IAMTest.kt +++ b/kotlin/services/iam/src/test/kotlin/IAMTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -28,10 +27,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory +import kotlin.random.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class IAMTest { + private val logger: Logger = LoggerFactory.getLogger(IAMTest::class.java) private var userName = "" private var policyName = "" private var roleName = "" @@ -51,9 +54,10 @@ class IAMTest { // Get the values to run these tests from AWS Secrets Manager. 
val gson = Gson() val json: String = getSecretValues() + val randomValue = Random.nextInt(1, 10001) val values = gson.fromJson(json, SecretValues::class.java) - userName = values.userName.toString() - policyName = values.policyName.toString() + userName = values.userName.toString() + randomValue + policyName = values.policyName.toString() + randomValue roleName = values.roleName.toString() accountAlias = values.accountAlias.toString() usernameSc = values.usernameSc.toString() @@ -62,22 +66,6 @@ class IAMTest { roleSessionName = values.roleName.toString() fileLocationSc = values.fileLocationSc.toString() bucketNameSc = values.bucketNameSc.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - userName = prop.getProperty("userName") - policyName = prop.getProperty("policyName") - roleName = prop.getProperty("roleName") - accountAlias = prop.getProperty("accountAlias") - policyNameSc = prop.getProperty("policyNameSc") - usernameSc = prop.getProperty("usernameSc") - roleNameSc = prop.getProperty("roleNameSc") - roleSessionName = prop.getProperty("roleSessionName") - fileLocationSc = prop.getProperty("fileLocationSc") - bucketNameSc = prop.getProperty("bucketNameSc") - */ } @Test @@ -88,7 +76,7 @@ class IAMTest { if (result != null) { Assertions.assertTrue(!result.isEmpty()) } - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -97,7 +85,7 @@ class IAMTest { runBlocking { policyARN = createIAMPolicy(policyName) Assertions.assertTrue(!policyARN.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -106,7 +94,7 @@ class IAMTest { runBlocking { keyId = createIAMAccessKey(userName) Assertions.assertTrue(!keyId.isEmpty()) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -114,7 +102,7 @@ class IAMTest { fun attachRolePolicyTest() = runBlocking { attachIAMRolePolicy(roleName, policyARN) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -122,7 +110,7 @@ class IAMTest { fun detachRolePolicyTest() = runBlocking { detachPolicy(roleName, policyARN) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -130,7 +118,7 @@ class IAMTest { fun getPolicyTest() = runBlocking { getIAMPolicy(policyARN) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -138,7 +126,7 @@ class IAMTest { fun listAccessKeysTest() = runBlocking { listKeys(userName) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -146,7 +134,7 @@ class IAMTest { fun listUsersTest() = runBlocking { listAllUsers() - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -154,7 +142,7 @@ class IAMTest { fun createAccountAliasTest() = runBlocking { createIAMAccountAlias(accountAlias) - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -162,7 +150,7 @@ class IAMTest { fun deleteAccountAliasTest() = runBlocking { deleteIAMAccountAlias(accountAlias) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -170,7 +158,7 @@ class IAMTest { fun deletePolicyTest() = runBlocking { deleteIAMPolicy(policyARN) - println("Test 11 passed") + logger.info("Test 11 passed") } @Test @@ -178,7 +166,7 @@ class IAMTest { fun deleteAccessKeyTest() = runBlocking { deleteKey(userName, keyId) - println("Test 12 passed") + logger.info("Test 12 passed") } @Test @@ -186,7 +174,7 @@ class IAMTest { fun deleteUserTest() = runBlocking { deleteIAMUser(userName) - println("Test 13 passed") + 
logger.info("Test 13 passed") } private suspend fun getSecretValues(): String { @@ -197,7 +185,6 @@ class IAMTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/iam/src/test/resources/logback.xml b/kotlin/services/iam/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/iam/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/iot/README.md b/kotlin/services/iot/README.md index a20a8745d52..58a4fb91b24 100644 --- a/kotlin/services/iot/README.md +++ b/kotlin/services/iot/README.md @@ -77,8 +77,19 @@ This example shows you how to get started using AWS IoT. #### Learn the basics -This example shows you how to work with AWS IoT device management. - +This example shows you how to do the following: + +- Create an AWS IoT Thing. +- Generate a device certificate. +- Update an AWS IoT Thing with Attributes. +- Return a unique endpoint. +- List your AWS IoT certificates. +- Create an AWS IoT shadow. +- Write out state information. +- Creates a rule. +- List your rules. +- Search things using the Thing name. +- Delete an AWS IoT Thing. @@ -114,4 +125,4 @@ in the `kotlin` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/kotlin/services/iot/build.gradle.kts b/kotlin/services/iot/build.gradle.kts index 9892e3970d2..46c0cc459e9 100644 --- a/kotlin/services/iot/build.gradle.kts +++ b/kotlin/services/iot/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:iot:1.2.28") - implementation("aws.sdk.kotlin:iotdataplane:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:iot") + implementation("aws.sdk.kotlin:iotdataplane") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/iot/src/test/kotlin/IoTTest.kt b/kotlin/services/iot/src/test/kotlin/IoTTest.kt index 7af9fc520a8..3eb243fc7e0 100644 --- a/kotlin/services/iot/src/test/kotlin/IoTTest.kt +++ b/kotlin/services/iot/src/test/kotlin/IoTTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.example.iot.attachCertificateToThing @@ -30,11 +29,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class IoTTest { + private val logger: Logger = LoggerFactory.getLogger(IoTTest::class.java) private var roleARN = "" private var snsAction = "" private var thingName = "foo" @@ -63,7 +65,7 @@ class IoTTest { fun helloIoTTest() = runBlocking { listAllThings() - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -89,14 +91,13 @@ class IoTTest { deleteCertificate(certificateArn) } deleteIoTThing(thingName) - println("Test 2 passed") + logger.info("Test 2 passed") } private suspend fun getSecretValues(): String { val secretClient = SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() } val secretName = "test/iot" val valueRequest = diff --git a/kotlin/services/iot/src/test/resources/logback.xml b/kotlin/services/iot/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/iot/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/kendra/build.gradle.kts b/kotlin/services/kendra/build.gradle.kts index 1b424d340b9..6588a3b090c 100644 --- a/kotlin/services/kendra/build.gradle.kts +++ b/kotlin/services/kendra/build.gradle.kts @@ -28,14 +28,16 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:kendra:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:kendra") + implementation("aws.sdk.kotlin:secretsmanager") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") - implementation("com.google.code.gson:gson:2.10") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/kendra/src/test/kotlin/KendraTest.kt b/kotlin/services/kendra/src/test/kotlin/KendraTest.kt index fef6b0c40aa..0129c724d2d 100644 --- a/kotlin/services/kendra/src/test/kotlin/KendraTest.kt +++ b/kotlin/services/kendra/src/test/kotlin/KendraTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
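The `println("Test N passed")` calls throughout these suites are replaced by a per-class SLF4J logger, formatted by the console pattern (`%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n`) in the new logback.xml test resources. A minimal sketch of the shape the test classes now share; the class and method names are placeholders:

```kotlin
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.MethodOrderer
import org.junit.jupiter.api.Order
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.TestInstance
import org.junit.jupiter.api.TestMethodOrder
import org.slf4j.Logger
import org.slf4j.LoggerFactory

// One logger per test class, ordered test methods, and logger.info(...)
// instead of println(...).
@TestInstance(TestInstance.Lifecycle.PER_CLASS)
@TestMethodOrder(MethodOrderer.OrderAnnotation::class)
class ExampleServiceTest {
    private val logger: Logger = LoggerFactory.getLogger(ExampleServiceTest::class.java)

    @Test
    @Order(1)
    fun firstOperationTest() =
        runBlocking {
            // Call the service operation under test here.
            logger.info("Test 1 passed")
        }
}
```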
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.kendra.KendraClient import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest @@ -23,11 +22,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class KendraTest { + private val logger: Logger = LoggerFactory.getLogger(KendraTest::class.java) private var kendra: KendraClient? = null private var indexName = "" private var indexDescription = "" @@ -55,29 +57,6 @@ class KendraTest { dataSourceDescription = values.dataSourceDescription.toString() dataSourceRoleArn = values.dataSourceRoleArn.toString() text = values.text.toString() - - /* - try { - KendraTest::class.java.classLoader.getResourceAsStream("config.properties").use { input -> - val prop = Properties() - if (input == null) { - println("Sorry, unable to find config.properties") - return - } - prop.load(input) - indexName = prop.getProperty("indexName") - indexRoleArn = prop.getProperty("indexRoleArn") - indexDescription = prop.getProperty("indexDescription") - s3BucketName = prop.getProperty("s3BucketName") - dataSourceName = prop.getProperty("dataSourceName") - dataSourceDescription = prop.getProperty("dataSourceDescription") - dataSourceRoleArn = prop.getProperty("dataSourceRoleArn") - text = prop.getProperty("text") - } - } catch (ex: IOException) { - ex.printStackTrace() - } - */ } @Test @@ -86,7 +65,7 @@ class KendraTest { runBlocking { indexId = createIndex(indexDescription, indexName, indexRoleArn) assertTrue(!indexId.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -95,7 +74,7 @@ class KendraTest { runBlocking { dataSourceId = createDataSource(s3BucketName, dataSourceName, dataSourceDescription, indexId, dataSourceRoleArn) assertTrue(!dataSourceId.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -103,7 +82,7 @@ class KendraTest { fun syncDataSource() = runBlocking { startDataSource(indexId, dataSourceId) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -111,7 +90,7 @@ class KendraTest { fun listSyncJobs() = runBlocking { listSyncJobs(indexId, dataSourceId) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -119,7 +98,7 @@ class KendraTest { fun queryIndex() = runBlocking { querySpecificIndex(indexId, text) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -127,7 +106,7 @@ class KendraTest { fun deleteDataSource() = runBlocking { deleteSpecificDataSource(indexId, dataSourceId) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -135,7 +114,7 @@ class KendraTest { fun deleteIndex() = runBlocking { deleteSpecificIndex(indexId) - println("Test 7 passed") + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { @@ -146,7 +125,6 @@ class KendraTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/kendra/src/test/resources/logback.xml 
b/kotlin/services/kendra/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/kendra/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/keyspaces/build.gradle.kts b/kotlin/services/keyspaces/build.gradle.kts index a2771f4f4ef..65024ef6694 100644 --- a/kotlin/services/keyspaces/build.gradle.kts +++ b/kotlin/services/keyspaces/build.gradle.kts @@ -27,15 +27,18 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:keyspaces:1.2.28") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:keyspaces") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.datastax.oss:java-driver-core:4.15.0") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") implementation("software.aws.mcs:aws-sigv4-auth-cassandra-java-driver-plugin:4.0.8") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.fasterxml.jackson.core:jackson-core:2.14.2") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/keyspaces/src/test/kotlin/KeyspaceTest.kt b/kotlin/services/keyspaces/src/test/kotlin/KeyspaceTest.kt index fc4a4047bd0..d1e63b7014e 100644 --- a/kotlin/services/keyspaces/src/test/kotlin/KeyspaceTest.kt +++ b/kotlin/services/keyspaces/src/test/kotlin/KeyspaceTest.kt @@ -8,14 +8,19 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class KeyspaceTest { + private val logger: Logger = LoggerFactory.getLogger(KeyspaceTest::class.java) + @Test @Order(1) fun keyspaceTest() = runBlocking { listKeyspaces() + logger.info("Test 1 passed") } } diff --git a/kotlin/services/keyspaces/src/test/resources/logback.xml b/kotlin/services/keyspaces/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/keyspaces/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/kinesis/build.gradle.kts b/kotlin/services/kinesis/build.gradle.kts index 12a5cd6b41f..cacf949064a 100644 --- a/kotlin/services/kinesis/build.gradle.kts +++ b/kotlin/services/kinesis/build.gradle.kts @@ -27,12 +27,15 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:kinesis:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:kinesis") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + 
implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.0") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/kinesis/src/test/kotlin/KinesisTest.kt b/kotlin/services/kinesis/src/test/kotlin/KinesisTest.kt index bd879dcb8e0..4159261c990 100644 --- a/kotlin/services/kinesis/src/test/kotlin/KinesisTest.kt +++ b/kotlin/services/kinesis/src/test/kotlin/KinesisTest.kt @@ -12,6 +12,8 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random import java.util.concurrent.TimeUnit import kotlin.system.exitProcess @@ -19,6 +21,7 @@ import kotlin.system.exitProcess @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class KinesisTest { + private val logger: Logger = LoggerFactory.getLogger(KinesisTest::class.java) private var streamName = "Stream" @BeforeAll @@ -33,7 +36,7 @@ class KinesisTest { fun createDataStreamTest() = runBlocking { createStream(streamName) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -41,7 +44,7 @@ class KinesisTest { fun describeLimitsTest() = runBlocking { describeKinLimits() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -56,14 +59,14 @@ class KinesisTest { System.err.println(e.message) exitProcess(1) } - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun deleteDataStreamTest() = runBlocking { deleteStream(streamName) - println("Test 7 passed") + logger.info("Test 4 passed") } } diff --git a/kotlin/services/kinesis/src/test/resources/logback.xml b/kotlin/services/kinesis/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/kinesis/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/kms/README.md b/kotlin/services/kms/README.md index 727598b7064..5e8e51329d4 100644 --- a/kotlin/services/kms/README.md +++ b/kotlin/services/kms/README.md @@ -36,11 +36,11 @@ Code excerpts that show you how to call individual service functions. - [CreateAlias](src/main/kotlin/com/kotlin/kms/CreateAlias.kt#L39) - [CreateGrant](src/main/kotlin/com/kotlin/kms/CreateGrant.kt#L43) - [CreateKey](src/main/kotlin/com/kotlin/kms/CreateCustomerKey.kt#L27) -- [Decrypt](src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt#L42) +- [Decrypt](src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt#L40) - [DescribeKey](src/main/kotlin/com/kotlin/kms/DescribeKey.kt#L37) - [DisableKey](src/main/kotlin/com/kotlin/kms/DisableCustomerKey.kt#L37) - [EnableKey](src/main/kotlin/com/kotlin/kms/EnableCustomerKey.kt#L37) -- [Encrypt](src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt#L42) +- [Encrypt](src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt#L40) - [ListAliases](src/main/kotlin/com/kotlin/kms/ListAliases.kt#L23) - [ListGrants](src/main/kotlin/com/kotlin/kms/ListGrants.kt#L36) - [ListKeys](src/main/kotlin/com/kotlin/kms/ListKeys.kt#L22) @@ -94,4 +94,4 @@ in the `kotlin` folder. Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/kotlin/services/kms/build.gradle.kts b/kotlin/services/kms/build.gradle.kts index 7807a201e2f..3834c0fa08d 100644 --- a/kotlin/services/kms/build.gradle.kts +++ b/kotlin/services/kms/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:kms:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:kms") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/kms/src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt b/kotlin/services/kms/src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt index cb7478eee19..0938dc040ce 100644 --- a/kotlin/services/kms/src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt +++ b/kotlin/services/kms/src/main/kotlin/com/kotlin/kms/EncryptDataKey.kt @@ -7,8 +7,8 @@ package com.kotlin.kms import aws.sdk.kotlin.services.kms.KmsClient import aws.sdk.kotlin.services.kms.model.DecryptRequest import aws.sdk.kotlin.services.kms.model.EncryptRequest -import java.io.File import kotlin.system.exitProcess + // snippet-end:[kms.kotlin_encrypt_data.import] /** @@ -25,18 +25,16 @@ suspend fun main(args: Array) { Where: keyId - A key id value to describe (for example, xxxxxbcd-12ab-34cd-56ef-1234567890ab). - path - The path of a text file where the data is written to (for example, C:\AWS\TextFile.txt). """ - if (args.size != 2) { + if (args.size != 1) { println(usage) exitProcess(0) } val keyId = args[0] - val path = args[1] val encryptedData = encryptData(keyId) - decryptData(encryptedData, keyId, path) + decryptData(encryptedData, keyId) } // snippet-start:[kms.kotlin_encrypt_data.main] @@ -63,7 +61,6 @@ suspend fun encryptData(keyIdValue: String): ByteArray? { suspend fun decryptData( encryptedDataVal: ByteArray?, keyIdVal: String?, - path: String, ) { val decryptRequest = DecryptRequest { @@ -74,10 +71,8 @@ suspend fun decryptData( val decryptResponse = kmsClient.decrypt(decryptRequest) val myVal = decryptResponse.plaintext - // Write the decrypted data to a file. - if (myVal != null) { - File(path).writeBytes(myVal) - } + // Print the decrypted data. + print(myVal) } } // snippet-end:[kms.kotlin_encrypt_data.main] diff --git a/kotlin/services/kms/src/test/kotlin/KMSKotlinTest.kt b/kotlin/services/kms/src/test/kotlin/KMSKotlinTest.kt index 9b670c534b5..739022bfa8c 100644 --- a/kotlin/services/kms/src/test/kotlin/KMSKotlinTest.kt +++ b/kotlin/services/kms/src/test/kotlin/KMSKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
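One caveat with the reworked EncryptDataKey.kt above: `decryptResponse.plaintext` is a `ByteArray?`, so `print(myVal)` writes the array's `toString()` (a reference such as `[B@...`) rather than the recovered text. A hedged variant that decodes the bytes before returning them, using the same request fields as the example (the region and key ID handling are placeholders):

```kotlin
import aws.sdk.kotlin.services.kms.KmsClient
import aws.sdk.kotlin.services.kms.model.DecryptRequest

// Decrypts the ciphertext and returns it as readable text instead of
// printing the raw ByteArray reference.
suspend fun decryptToText(
    encryptedDataVal: ByteArray?,
    keyIdVal: String?,
): String? {
    val decryptRequest =
        DecryptRequest {
            ciphertextBlob = encryptedDataVal
            keyId = keyIdVal
        }
    KmsClient { region = "us-east-1" }.use { kmsClient ->
        val decryptResponse = kmsClient.decrypt(decryptRequest)
        return decryptResponse.plaintext?.toString(Charsets.UTF_8)
    }
}
```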
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -28,17 +27,19 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class KMSKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(KMSKotlinTest::class.java) private var keyId = "" // gets set in test 2 private var keyDesc = "" private var granteePrincipal = "" private var operation = "" private var grantId = "" private var aliasName = "" - private var path = "" @BeforeAll fun setup() = @@ -51,77 +52,66 @@ class KMSKotlinTest { operation = values.operation.toString() aliasName = values.aliasName.toString() granteePrincipal = values.granteePrincipal.toString() - path = values.path.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - keyDesc = prop.getProperty("keyDesc") - granteePrincipal = prop.getProperty("granteePrincipal") - operation = prop.getProperty("operation") - aliasName = prop.getProperty("aliasName") - path = prop.getProperty("path") - */ } @Test - @Order(2) + @Order(1) fun createCustomerKeyTest() = runBlocking { keyId = createKey(keyDesc).toString() Assertions.assertTrue(!keyId.isEmpty()) - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(3) + @Order(2) fun encryptDataKeyTest() = runBlocking { + val plaintext = "Hello, AWS KMS!" 
val encryptData = encryptData(keyId) - decryptData(encryptData, keyId, path) - println("Test 3 passed") + decryptData(encryptData, keyId) + logger.info("Test 2 passed") } @Test - @Order(4) + @Order(3) fun disableCustomerKeyTest() = runBlocking { disableKey(keyId) - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun enableCustomerKeyTest() = runBlocking { enableKey(keyId) - println("Test 5 passed") + logger.info("Test 4 passed") } @Test - @Order(6) + @Order(5) fun createGrantTest() = runBlocking { grantId = createNewGrant(keyId, granteePrincipal, operation).toString() Assertions.assertTrue(!grantId.isEmpty()) - println("Test 6 passed") + logger.info("Test 5 passed") } @Test - @Order(7) + @Order(6) fun listGrantsTest() = runBlocking { displayGrantIds(keyId) - println("Test 7 passed") + logger.info("Test 6 passed") } @Test - @Order(8) + @Order(7) fun revokeGrantsTest() = runBlocking { revokeKeyGrant(keyId, grantId) - println("Test 8 passed") + logger.info("Test 7 passed") } @Test @@ -129,39 +119,39 @@ class KMSKotlinTest { fun describeKeyTest() = runBlocking { describeSpecifcKey(keyId) - println("Test 9 passed") + logger.info("Test 8 passed") } @Test - @Order(10) + @Order(9) fun createAliasTest() = runBlocking { createCustomAlias(keyId, aliasName) - println("Test 10 passed") + logger.info("Test 9 passed") } @Test - @Order(11) + @Order(10) fun listAliasesTest() = runBlocking { listAllAliases() - println("Test 11 passed") + logger.info("Test 10 passed") } @Test - @Order(12) + @Order(11) fun deleteAliasTest() = runBlocking { deleteSpecificAlias(aliasName) - println("Test 12 passed") + logger.info("Test 11 passed") } @Test - @Order(13) + @Order(12) fun listKeysTest() = runBlocking { listAllKeys() - println("Test 13 passed") + logger.info("Test 12 passed") } private suspend fun getSecretValues(): String { @@ -172,7 +162,6 @@ class KMSKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/kms/src/test/resources/logback.xml b/kotlin/services/kms/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/kms/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/lambda/build.gradle.kts b/kotlin/services/lambda/build.gradle.kts index dcc5a825436..da62888bee8 100644 --- a/kotlin/services/lambda/build.gradle.kts +++ b/kotlin/services/lambda/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:lambda:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:lambda") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + 
implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/CreateFunction.kt b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/CreateFunction.kt index b3639985113..fbf88aa6a55 100644 --- a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/CreateFunction.kt +++ b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/CreateFunction.kt @@ -68,10 +68,10 @@ suspend fun createNewFunction( description = "Created by the Lambda Kotlin API" handler = myHandler role = myRole - runtime = Runtime.Java8 + runtime = Runtime.Java17 } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val functionResponse = awsLambda.createFunction(request) awsLambda.waitUntilFunctionActive { functionName = myFunctionName diff --git a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/DeleteFunction.kt b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/DeleteFunction.kt index e2d4d8f5e54..acef6a41cf9 100644 --- a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/DeleteFunction.kt +++ b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/DeleteFunction.kt @@ -42,7 +42,7 @@ suspend fun delLambdaFunction(myFunctionName: String) { functionName = myFunctionName } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> awsLambda.deleteFunction(request) println("$myFunctionName was deleted") } diff --git a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/LambdaScenario.kt b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/LambdaScenario.kt index ee938abceb8..c2d2393137a 100644 --- a/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/LambdaScenario.kt +++ b/kotlin/services/lambda/src/main/kotlin/com/kotlin/lambda/LambdaScenario.kt @@ -117,11 +117,11 @@ suspend fun createScFunction( description = "Created by the Lambda Kotlin API" handler = myHandler role = myRole - runtime = Runtime.Java8 + runtime = Runtime.Java17 } // Create a Lambda function using a waiter - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val functionResponse = awsLambda.createFunction(request) awsLambda.waitUntilFunctionActive { functionName = myFunctionName @@ -136,7 +136,7 @@ suspend fun getFunction(functionNameVal: String) { functionName = functionNameVal } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val response = awsLambda.getFunction(functionRequest) println("The runtime of this Lambda function is ${response.configuration?.runtime}") } @@ -148,7 +148,7 @@ suspend fun listFunctionsSc() { maxItems = 10 } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val response = awsLambda.listFunctions(request) response.functions?.forEach { function -> println("The function name is ${function.functionName}") @@ -166,7 +166,7 @@ suspend fun invokeFunctionSc(functionNameVal: String) { logType = LogType.Tail } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val res = awsLambda.invoke(request) println("The function payload is ${res.payload?.toString(Charsets.UTF_8)}") } @@ -185,7 +185,7 @@ suspend fun updateFunctionCode( s3Key = key } - LambdaClient { 
region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> val response = awsLambda.updateFunctionCode(functionCodeRequest) awsLambda.waitUntilFunctionUpdated { functionName = functionNameVal @@ -202,10 +202,10 @@ suspend fun updateFunctionConfiguration( UpdateFunctionConfigurationRequest { functionName = functionNameVal handler = handlerVal - runtime = Runtime.Java11 + runtime = Runtime.Java17 } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> awsLambda.updateFunctionConfiguration(configurationRequest) } } @@ -216,7 +216,7 @@ suspend fun delFunction(myFunctionName: String) { functionName = myFunctionName } - LambdaClient { region = "us-west-2" }.use { awsLambda -> + LambdaClient { region = "us-east-1" }.use { awsLambda -> awsLambda.deleteFunction(request) println("$myFunctionName was deleted") } diff --git a/kotlin/services/lambda/src/test/kotlin/LambdaTest.kt b/kotlin/services/lambda/src/test/kotlin/LambdaTest.kt index cf17b447573..fa481f068b9 100644 --- a/kotlin/services/lambda/src/test/kotlin/LambdaTest.kt +++ b/kotlin/services/lambda/src/test/kotlin/LambdaTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -26,11 +25,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class LambdaTest { + private val logger: Logger = LoggerFactory.getLogger(LambdaTest::class.java) var functionName: String = "" var functionARN: String = "" // Gets set in a test. 
var s3BucketName: String = "" @@ -55,19 +57,6 @@ class LambdaTest { s3BucketName = values.bucketName.toString() updatedBucketName = values.bucketName2.toString() s3Key = values.key.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - functionName = prop.getProperty("functionName") - functionNameSc = prop.getProperty("functionNameSc") - s3BucketName = prop.getProperty("s3BucketName") - updatedBucketName = prop.getProperty("updatedBucketName") - s3Key = prop.getProperty("s3Key") - role = prop.getProperty("role") - handler = prop.getProperty("handler") - */ } @Test @@ -76,7 +65,7 @@ class LambdaTest { runBlocking { functionARN = createNewFunction(functionName, s3BucketName, s3Key, handler, role).toString() Assertions.assertTrue(!functionARN.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -84,7 +73,7 @@ class LambdaTest { fun listLambdaTest() = runBlocking { listFunctions() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -92,7 +81,7 @@ class LambdaTest { fun getAccountSettings() = runBlocking { getSettings() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -100,7 +89,7 @@ class LambdaTest { fun deleteFunctionTest() = runBlocking { delLambdaFunction(functionName) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -125,7 +114,7 @@ class LambdaTest { // Update the AWS Lambda function code. println("*** Update the Lambda function code.") - updateFunctionCode(functionNameSc, updatedBucketName, s3Key) + updateFunctionCode(functionNameSc, s3BucketName, s3Key) // println("*** Invoke the function again after updating the code.") invokeFunctionSc(functionNameSc) @@ -137,6 +126,7 @@ class LambdaTest { // Delete the AWS Lambda function. 
println("Delete the AWS Lambda function.") delFunction(functionNameSc) + logger.info("Test 5 passed") } private suspend fun getSecretValues(): String { @@ -147,7 +137,6 @@ class LambdaTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/lambda/src/test/resources/logback.xml b/kotlin/services/lambda/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/lambda/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/lex/build.gradle.kts b/kotlin/services/lex/build.gradle.kts index f34dced5b31..a5caa6e3501 100644 --- a/kotlin/services/lex/build.gradle.kts +++ b/kotlin/services/lex/build.gradle.kts @@ -28,14 +28,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:lexruntimeservice:1.0.30") - implementation("aws.sdk.kotlin:secretsmanager:1.0.30") - implementation("aws.sdk.kotlin:lexmodelbuildingservice:1.0.30") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:lexruntimeservice") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.sdk.kotlin:lexmodelbuildingservice") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/lex/src/main/kotlin/com/kotlin/lex/GetBotStatus.kt b/kotlin/services/lex/src/main/kotlin/com/kotlin/lex/GetBotStatus.kt index 95d4bf7d207..e115f8e5124 100644 --- a/kotlin/services/lex/src/main/kotlin/com/kotlin/lex/GetBotStatus.kt +++ b/kotlin/services/lex/src/main/kotlin/com/kotlin/lex/GetBotStatus.kt @@ -54,7 +54,7 @@ suspend fun getStatus(botName: String?) { val response = lexClient.getBot(request) status = response.status.toString() println("The status is $status") - } while (status.compareTo("READY") != 0) + } while (status.compareTo("Ready") != 0) } } // snippet-end:[lex.kotlin.get_status.main] diff --git a/kotlin/services/lex/src/test/kotlin/LexTest.kt b/kotlin/services/lex/src/test/kotlin/LexTest.kt index 9629e4d1e24..c76d853eea4 100644 --- a/kotlin/services/lex/src/test/kotlin/LexTest.kt +++ b/kotlin/services/lex/src/test/kotlin/LexTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -20,10 +19,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class LexTest { + private val logger: Logger = LoggerFactory.getLogger(LexTest::class.java) private var botName = "" private var intentName = "" private var intentVersion = "" @@ -38,13 +40,6 @@ class LexTest { botName = values.botName.toString() intentName = values.intentName.toString() intentVersion = values.intentVersion.toString() - - // val input = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - // val prop = Properties() - // prop.load(input) - // botName = prop.getProperty("botName") - // intentName = prop.getProperty("intentName") - // intentVersion = prop.getProperty("intentVersion") } @Test @@ -52,7 +47,7 @@ class LexTest { fun putBotTest() = runBlocking { createBot(botName, intentName, intentVersion) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -60,7 +55,7 @@ class LexTest { fun getBotsTest() = runBlocking { getAllBots() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -68,7 +63,7 @@ class LexTest { fun getIntentTest() = runBlocking { getSpecificIntent(intentName, intentVersion) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -76,7 +71,7 @@ class LexTest { fun getSlotTypesTest() = runBlocking { getSlotsInfo() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -84,7 +79,7 @@ class LexTest { fun getBotStatusTest() = runBlocking { getStatus(botName) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -92,7 +87,7 @@ class LexTest { fun deleteBotTest() = runBlocking { deleteSpecificBot(botName) - println("Test 6 passed") + logger.info("Test 6 passed") } private suspend fun getSecretValues(): String { @@ -103,7 +98,6 @@ class LexTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/lex/src/test/resources/logback.xml b/kotlin/services/lex/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/lex/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/mediaconvert/build.gradle.kts b/kotlin/services/mediaconvert/build.gradle.kts index 1e3b0b69ac1..680aa4db3ef 100644 --- a/kotlin/services/mediaconvert/build.gradle.kts +++ b/kotlin/services/mediaconvert/build.gradle.kts @@ -28,13 +28,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:mediaconvert:1.0.30") - implementation("aws.sdk.kotlin:secretsmanager:1.0.30") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + 
implementation(platform("aws.sdk.kotlin:bom:1.0.30")) + implementation("aws.sdk.kotlin:mediaconvert") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/mediaconvert/src/test/kotlin/MCTest.kt b/kotlin/services/mediaconvert/src/test/kotlin/MCTest.kt index b2c6a725bde..a278903e946 100644 --- a/kotlin/services/mediaconvert/src/test/kotlin/MCTest.kt +++ b/kotlin/services/mediaconvert/src/test/kotlin/MCTest.kt @@ -1,6 +1,8 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 +import aws.sdk.kotlin.services.mediaconvert.MediaConvertClient +import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson import com.kotlin.mediaconvert.createMediaJob @@ -16,11 +18,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.io.IOException @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class MCTest { + private val logger: Logger = LoggerFactory.getLogger(MCTest::class.java) lateinit var mcClient: MediaConvertClient private var mcRoleARN = "" private var fileInput = "" @@ -37,39 +42,31 @@ class MCTest { val values = gson.fromJson(json, SecretValues::class.java) mcRoleARN = values.mcRoleARN.toString() fileInput = values.fileInput.toString() - /* - - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - mcRoleARN = prop.getProperty("mcRoleARN") - fileInput = prop.getProperty("fileInput") - */ } @Test - @Order(2) + @Order(1) fun createJobTest() = runBlocking { jobId = createMediaJob(mcClient, mcRoleARN, fileInput).toString() assertTrue(!jobId.isEmpty()).toString() - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(3) + @Order(2) fun listJobsTest() = runBlocking { listCompleteJobs(mcClient) - println("Test 3 passed") + logger.info("Test 2 passed") } @Test - @Order(4) + @Order(3) fun getJobTest() = runBlocking { getSpecificJob(mcClient, jobId) - println("Test 4 passed") + logger.info("Test 3 passed") } private suspend fun getSecretValues(): String { @@ -80,7 +77,6 @@ class MCTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/mediaconvert/src/test/resources/logback.xml b/kotlin/services/mediaconvert/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/mediaconvert/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git 
a/kotlin/services/mediastore/build.gradle.kts b/kotlin/services/mediastore/build.gradle.kts index 6a2b24d1874..8965d5f82bb 100644 --- a/kotlin/services/mediastore/build.gradle.kts +++ b/kotlin/services/mediastore/build.gradle.kts @@ -26,11 +26,14 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:mediastore:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:mediastore") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/mediastore/src/test/kotlin/MediaStoreTest.kt b/kotlin/services/mediastore/src/test/kotlin/MediaStoreTest.kt index c4a6c4dc0e2..4aa0cdfbe97 100644 --- a/kotlin/services/mediastore/src/test/kotlin/MediaStoreTest.kt +++ b/kotlin/services/mediastore/src/test/kotlin/MediaStoreTest.kt @@ -12,11 +12,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class MediaStoreTest { + private val logger: Logger = LoggerFactory.getLogger(MediaStoreTest::class.java) private var containerName = "" @BeforeAll @@ -30,16 +33,16 @@ class MediaStoreTest { @Order(1) fun createContainerTest() = runBlocking { - println("Status is " + createMediaContainer(containerName)) - println("Test 1 passed") + logger.info("Status is " + createMediaContainer(containerName)) + logger.info("Test 1 passed") } @Test @Order(2) fun describeContainerTest() = runBlocking { - println("Status is " + checkContainer(containerName)) - println("Test 2 passed") + logger.info("Status is " + checkContainer(containerName)) + logger.info("Test 2 passed") } @Test @@ -47,7 +50,7 @@ class MediaStoreTest { fun listContainersTest() = runBlocking { listAllContainers() - println("Test 4 passed") + logger.info("Test 3 passed") } @Test @@ -55,6 +58,6 @@ class MediaStoreTest { fun deleteContainerTest() = runBlocking { deleteMediaContainer(containerName) - println("Test 4 passed") + logger.info("Test 4 passed") } } diff --git a/kotlin/services/mediastore/src/test/resources/logback.xml b/kotlin/services/mediastore/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/mediastore/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/opensearch/build.gradle.kts b/kotlin/services/opensearch/build.gradle.kts index a5ba54dcddd..43f14cae347 100644 --- a/kotlin/services/opensearch/build.gradle.kts +++ b/kotlin/services/opensearch/build.gradle.kts @@ -27,12 +27,15 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:opensearch:1.2.28") - 
implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:opensearch") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { diff --git a/kotlin/services/opensearch/src/test/kotlin/OpenSearchTest.kt b/kotlin/services/opensearch/src/test/kotlin/OpenSearchTest.kt index c763f8d36a0..eee5fa8fd54 100644 --- a/kotlin/services/opensearch/src/test/kotlin/OpenSearchTest.kt +++ b/kotlin/services/opensearch/src/test/kotlin/OpenSearchTest.kt @@ -11,11 +11,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class OpenSearchTest { + private val logger: Logger = LoggerFactory.getLogger(OpenSearchTest::class.java) private var domainName = "" @BeforeAll @@ -30,7 +33,7 @@ class OpenSearchTest { fun createDomainTest() = runBlocking { createNewDomain(domainName) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -38,7 +41,7 @@ class OpenSearchTest { fun listDomainNamesTest() = runBlocking { listAllDomains() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -46,6 +49,6 @@ class OpenSearchTest { fun deleteDomainTest() = runBlocking { deleteSpecificDomain(domainName) - println("Test 4 passed") + logger.info("Test 3 passed") } } diff --git a/kotlin/services/opensearch/src/test/resources/logback.xml b/kotlin/services/opensearch/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/opensearch/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/personalize/build.gradle.kts b/kotlin/services/personalize/build.gradle.kts index 8605242f23c..6c4e3754f42 100644 --- a/kotlin/services/personalize/build.gradle.kts +++ b/kotlin/services/personalize/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:personalize:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.sdk.kotlin:personalizeruntime:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:personalize") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.sdk.kotlin:personalizeruntime") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") 
+ implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/personalize/src/test/kotlin/PersonalizeKotlinTest.kt b/kotlin/services/personalize/src/test/kotlin/PersonalizeKotlinTest.kt index 63a43976124..c217663285d 100644 --- a/kotlin/services/personalize/src/test/kotlin/PersonalizeKotlinTest.kt +++ b/kotlin/services/personalize/src/test/kotlin/PersonalizeKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -26,12 +25,15 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID import java.util.concurrent.TimeUnit @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class PersonalizeKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(PersonalizeKotlinTest::class.java) private var datasetGroupArn = "" private var solutionArn = "" private var solutionVersionArn = "" @@ -55,18 +57,6 @@ class PersonalizeKotlinTest { solutionName = values.solutionName.toString() + UUID.randomUUID() userId = values.userId.toString() campaignName = values.campaignName.toString() + UUID.randomUUID() - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - solutionName = prop.getProperty("solutionName") - datasetGroupArn = prop.getProperty("datasetGroupArn") - recipeArn = prop.getProperty("recipeArn") - solutionVersionArn = prop.getProperty("solutionVersionArn") - campaignName = prop.getProperty("campaignName") - campaignArn = prop.getProperty("campaignArn") - userId = prop.getProperty("userId") - */ } @Test @@ -75,7 +65,7 @@ class PersonalizeKotlinTest { runBlocking { solutionArn = createPersonalizeSolution(datasetGroupArn, solutionName, recipeArn).toString() assertTrue(!solutionArn.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -83,7 +73,7 @@ class PersonalizeKotlinTest { fun listSolutions() = runBlocking { listAllSolutions(datasetGroupArn) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -91,7 +81,7 @@ class PersonalizeKotlinTest { fun describeSolution() = runBlocking { describeSpecificSolution(solutionArn) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -100,7 +90,7 @@ class PersonalizeKotlinTest { runBlocking { newCampaignArn = createPersonalCompaign(solutionVersionArn, campaignName).toString() assertTrue(!newCampaignArn.isEmpty()) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -110,7 +100,7 @@ class PersonalizeKotlinTest { println("Wait 20 mins for resource to become available.") TimeUnit.MINUTES.sleep(20) describeSpecificCampaign(newCampaignArn) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -118,7 +108,7 @@ class PersonalizeKotlinTest { fun listCampaigns() = runBlocking { listAllCampaigns(solutionArn) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -126,7 +116,7 @@ class PersonalizeKotlinTest { fun 
listRecipes() = runBlocking { listAllRecipes() - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -134,7 +124,7 @@ class PersonalizeKotlinTest { fun listDatasetGroups() = runBlocking { listDSGroups() - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -142,7 +132,7 @@ class PersonalizeKotlinTest { fun deleteSolution() = runBlocking { deleteGivenSolution(solutionArn) - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -150,7 +140,7 @@ class PersonalizeKotlinTest { fun getRecommendations() = runBlocking { getRecs(newCampaignArn, userId) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -158,7 +148,7 @@ class PersonalizeKotlinTest { fun deleteCampaign() = runBlocking { deleteSpecificCampaign(newCampaignArn) - println("Test 11 passed") + logger.info("Test 11 passed") } private suspend fun getSecretValues(): String { @@ -169,7 +159,6 @@ class PersonalizeKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/personalize/src/test/resources/logback.xml b/kotlin/services/personalize/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/personalize/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/pinpoint/build.gradle.kts b/kotlin/services/pinpoint/build.gradle.kts index 5b08a81f4d8..86eee956a4b 100644 --- a/kotlin/services/pinpoint/build.gradle.kts +++ b/kotlin/services/pinpoint/build.gradle.kts @@ -27,15 +27,18 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:pinpoint:1.2.28") - implementation("aws.sdk.kotlin:pinpointemail:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:pinpoint") + implementation("aws.sdk.kotlin:pinpointemail") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.google.code.gson:gson:2.10.1") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/pinpoint/src/test/kotlin/PinpointKotlinTest.kt b/kotlin/services/pinpoint/src/test/kotlin/PinpointKotlinTest.kt index 1242985e104..840faabb6af 100644 --- a/kotlin/services/pinpoint/src/test/kotlin/PinpointKotlinTest.kt +++ b/kotlin/services/pinpoint/src/test/kotlin/PinpointKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
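
The test-class hunks in this patch all make the same change: a class-level SLF4J logger created with LoggerFactory, with the per-test println calls switched to logger.info. Condensed, this is the shape the classes now share (class and test names below are illustrative, not from the patch):

    import kotlinx.coroutines.runBlocking
    import org.junit.jupiter.api.MethodOrderer.OrderAnnotation
    import org.junit.jupiter.api.Order
    import org.junit.jupiter.api.Test
    import org.junit.jupiter.api.TestInstance
    import org.junit.jupiter.api.TestMethodOrder
    import org.slf4j.Logger
    import org.slf4j.LoggerFactory

    @TestInstance(TestInstance.Lifecycle.PER_CLASS)
    @TestMethodOrder(OrderAnnotation::class)
    class ExampleServiceTest {
        // One logger per test class, named after the class.
        private val logger: Logger = LoggerFactory.getLogger(ExampleServiceTest::class.java)

        @Test
        @Order(1)
        fun firstOperationTest() = runBlocking {
            // Call the service operation under test here.
            logger.info("Test 1 passed")
        }
    }
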
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -27,10 +26,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class PinpointKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(PinpointKotlinTest::class.java) private var appName = "" private var appId = "" private var endpointId = "" @@ -60,28 +62,6 @@ class PinpointKotlinTest { toAddress = valuesOb.toAddress.toString() subject = valuesOb.subject.toString() existingApp = valuesOb.existingApplicationId.toString() - - /* - try { - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. - prop.load(input) - appName = prop.getProperty("appName") - originationNumber = prop.getProperty("originationNumber") - destinationNumber = prop.getProperty("destinationNumber") - message = prop.getProperty("message") - userId = prop.getProperty("userId") - senderAddress = prop.getProperty("senderAddress") - toAddress = prop.getProperty("toAddress") - subject = prop.getProperty("subject") - existingApp = prop.getProperty("existingApp") - existingEndpoint = prop.getProperty("existingEndpoint") - } catch (ex: IOException) { - ex.printStackTrace() - } - */ } @Test @@ -90,7 +70,7 @@ class PinpointKotlinTest { runBlocking { appId = createApplication(appName).toString() assertTrue(!appId.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -99,7 +79,7 @@ class PinpointKotlinTest { runBlocking { endpointId = createPinpointEndpoint(appId).toString() assertTrue(!endpointId.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -107,7 +87,7 @@ class PinpointKotlinTest { fun addExampleEndpointTest() = runBlocking { updateEndpointsViaBatch(appId) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -115,7 +95,7 @@ class PinpointKotlinTest { fun lookUpEndpointTest() = runBlocking { lookupPinpointEndpoint(appId, endpointId) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -123,7 +103,7 @@ class PinpointKotlinTest { fun deleteEndpointTest() = runBlocking { deletePinEncpoint(appId, endpointId) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -131,7 +111,7 @@ class PinpointKotlinTest { fun sendMessageTest() = runBlocking { sendSMSMessage(message, appId, originationNumber, destinationNumber) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -140,7 +120,7 @@ class PinpointKotlinTest { runBlocking { segmentId = createPinpointSegment(appId).toString() assertTrue(!segmentId.isEmpty()) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -148,7 +128,7 @@ class PinpointKotlinTest { fun listSegmentsTest() = runBlocking { listSegs(appId) - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -156,7 +136,7 @@ class PinpointKotlinTest { fun createCampaignTest() = runBlocking { createPinCampaign(appId, segmentId) - println("Test 9 passed") + logger.info("Test 9 passed") } @Test @@ -164,7 +144,7 @@ class PinpointKotlinTest { fun 
sendEmailMessageTest() = runBlocking { sendEmail(subject, senderAddress, toAddress) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -172,7 +152,7 @@ class PinpointKotlinTest { fun listEndpointIdsTest() = runBlocking { listAllEndpoints(existingApp, userId) - println("Test 11 passed") + logger.info("Test 11 passed") } @Test @@ -180,7 +160,7 @@ class PinpointKotlinTest { fun deleteAppTest() = runBlocking { deletePinApp(appId) - println("Test 12 passed") + logger.info("Test 12 passed") } private suspend fun getSecretValues(): String { @@ -191,7 +171,6 @@ class PinpointKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/pinpoint/src/test/resources/logback.xml b/kotlin/services/pinpoint/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/pinpoint/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/polly/build.gradle.kts b/kotlin/services/polly/build.gradle.kts index 8564452b663..9642b6b3e8d 100644 --- a/kotlin/services/polly/build.gradle.kts +++ b/kotlin/services/polly/build.gradle.kts @@ -27,12 +27,15 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:polly:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:polly") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation("com.googlecode.soundlibs:jlayer:1.0.1.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/polly/src/test/kotlin/PollyKotlinTest.kt b/kotlin/services/polly/src/test/kotlin/PollyKotlinTest.kt index 8cd08e0c31a..c228e682a16 100644 --- a/kotlin/services/polly/src/test/kotlin/PollyKotlinTest.kt +++ b/kotlin/services/polly/src/test/kotlin/PollyKotlinTest.kt @@ -10,16 +10,20 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class PollyKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(PollyKotlinTest::class.java) + @Test @Order(1) fun pollyDemo() = runBlocking { talkPolly() - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -27,7 +31,7 @@ class PollyKotlinTest { fun describeVoicesSample() = runBlocking { describeVoice() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -35,6 +39,6 @@ class PollyKotlinTest { fun listLexiconsTest() = runBlocking { listLexicons() - println("Test 3 passed") + logger.info("Test 3 passed") } } diff --git a/kotlin/services/polly/src/test/resources/logback.xml 
b/kotlin/services/polly/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/polly/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/rds/build.gradle.kts b/kotlin/services/rds/build.gradle.kts index 2f966f33191..6fba6c6c379 100644 --- a/kotlin/services/rds/build.gradle.kts +++ b/kotlin/services/rds/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:rds:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:rds") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/CreateDBInstance.kt b/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/CreateDBInstance.kt index ef51764c66f..68cf0dd3ff6 100644 --- a/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/CreateDBInstance.kt +++ b/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/CreateDBInstance.kt @@ -65,9 +65,9 @@ suspend fun createDatabaseInstance( allocatedStorage = 100 dbName = dbNamedbVal engine = "mysql" - dbInstanceClass = "db.m4.large" - engineVersion = "8.0" - storageType = "standard" + dbInstanceClass = "db.t3.micro" // Use a supported instance class + engineVersion = "8.0.39" // Use a supported engine version + storageType = "gp2" masterUsername = masterUsernameVal masterUserPassword = masterUserPasswordVal } diff --git a/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/RDSScenario.kt b/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/RDSScenario.kt index 9a2d63e85f5..3755ba17c2e 100644 --- a/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/RDSScenario.kt +++ b/kotlin/services/rds/src/main/kotlin/com/kotlin/rds/RDSScenario.kt @@ -306,9 +306,9 @@ suspend fun createDatabaseInstance( dbName = dbNameVal dbParameterGroupName = dbGroupNameVal engine = "mysql" - dbInstanceClass = "db.m4.large" - engineVersion = "8.0" - storageType = "standard" + dbInstanceClass = "db.t3.micro" + engineVersion = "8.0.35" + storageType = "gp2" masterUsername = masterUsernameVal masterUserPassword = masterUserPasswordVal } diff --git a/kotlin/services/rds/src/test/kotlin/RDSTest.kt b/kotlin/services/rds/src/test/kotlin/RDSTest.kt index d7d6d7f7833..cd3a19101a9 100644 --- a/kotlin/services/rds/src/test/kotlin/RDSTest.kt +++ b/kotlin/services/rds/src/test/kotlin/RDSTest.kt @@ -32,6 +32,8 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random import java.util.UUID @@ -42,6 +44,7 @@ import java.util.UUID 
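
The two RDS hunks above replace values the new comments flag as unsupported (db.m4.large, engine version "8.0", standard storage) with supported ones. A minimal sketch of building that request with the AWS SDK for Kotlin, using the same fields the patch touches (identifier, database name, and credentials are placeholders):

    import aws.sdk.kotlin.services.rds.RdsClient
    import aws.sdk.kotlin.services.rds.model.CreateDbInstanceRequest

    suspend fun createInstanceSketch() {
        val request = CreateDbInstanceRequest {
            dbInstanceIdentifier = "example-instance"   // placeholder
            dbName = "exampledb"                        // placeholder
            allocatedStorage = 100
            engine = "mysql"
            dbInstanceClass = "db.t3.micro"             // supported instance class
            engineVersion = "8.0.39"                    // supported MySQL engine version
            storageType = "gp2"
            masterUsername = "admin"                    // placeholder
            masterUserPassword = "change-me"            // placeholder
        }
        RdsClient { region = "us-east-1" }.use { rds ->
            val response = rds.createDbInstance(request)
            println("Requested instance ${response.dbInstance?.dbInstanceIdentifier}")
        }
    }
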
@TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class RDSTest { + private val logger: Logger = LoggerFactory.getLogger(RDSTest::class.java) private var dbInstanceIdentifier = "" private var dbSnapshotIdentifier = "" private var dbName = "" @@ -75,116 +78,76 @@ class RDSTest { dbInstanceIdentifierSc = values.dbInstanceIdentifierSc + UUID.randomUUID() dbSnapshotIdentifierSc = values.dbSnapshotIdentifierSc + UUID.randomUUID() dbNameSc = values.dbNameSc + randomNum - -// Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // Load the properties file. - prop.load(input) - dbInstanceIdentifier = prop.getProperty("dbInstanceIdentifier") - dbSnapshotIdentifier = prop.getProperty("dbSnapshotIdentifier") - dbName = prop.getProperty("dbName") - masterUsername = prop.getProperty("masterUsername") - masterUserPassword = prop.getProperty("masterUserPassword") - newMasterUserPassword = prop.getProperty("newMasterUserPassword") - dbGroupNameSc = prop.getProperty("dbGroupNameSc") - dbParameterGroupFamilySc = prop.getProperty("dbParameterGroupFamilySc") - dbInstanceIdentifierSc = prop.getProperty("dbInstanceIdentifierSc") - masterUsernameSc = prop.getProperty("masterUsernameSc") - masterUserPasswordSc = prop.getProperty("masterUserPasswordSc") - dbSnapshotIdentifierSc = prop.getProperty("dbSnapshotIdentifierSc") - dbNameSc = prop.getProperty("dbNameSc") - */ } @Test - @Order(2) + @Order(1) fun createDBInstanceTest() = runBlocking { createDatabaseInstance(dbInstanceIdentifier, dbName, masterUsername, masterUserPassword) - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(3) + @Order(2) fun waitForInstanceReadyTest() = runBlocking { waitForInstanceReady(dbInstanceIdentifier) - println("Test 3 passed") + logger.info("Test 2 passed") } @Test - @Order(4) + @Order(3) fun describeAccountAttributesTest() = runBlocking { getAccountAttributes() - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun describeDBInstancesTest() = runBlocking { describeInstances() - println("Test 5 passed") + logger.info("Test 4 passed") } @Test - @Order(6) + @Order(5) fun modifyDBInstanceTest() = runBlocking { updateIntance(dbInstanceIdentifier, newMasterUserPassword) - println("Test 6 passed") + logger.info("Test 5 passed") } @Test - @Order(7) + @Order(6) fun createDBSnapshotTest() = runBlocking { createSnapshot(dbInstanceIdentifier, dbSnapshotIdentifier) - println("Test 7 passed") + logger.info("Test 6 passed") } @Test - @Order(8) + @Order(7) fun deleteDBInstanceTest() = runBlocking { deleteDatabaseInstance(dbInstanceIdentifier) - println("Test 8 passed") + logger.info("Test 7 passed") } @Test - @Order(9) + @Order(8) fun scenarioTest() = runBlocking { - println("1. Return a list of the available DB engines") describeDBEngines() - - println("2. Create a custom parameter group") createDBParameterGroup(dbGroupNameSc, dbParameterGroupFamilySc) - - println("3. Get the parameter groups") describeDbParameterGroups(dbGroupNameSc) - - println("4. Get the parameters in the group") describeDbParameters(dbGroupNameSc, 0) - - println("5. Modify the auto_increment_offset parameter") modifyDBParas(dbGroupNameSc) - - println("6. Display the updated value") describeDbParameters(dbGroupNameSc, -1) - - println("7. 
Get a list of allowed engine versions") getAllowedEngines(dbParameterGroupFamilySc) - - println("8. Get a list of micro instance classes available for the selected engine") getMicroInstances() - - println("9. Create an RDS database instance that contains a MySql database and uses the parameter group") val dbARN = createDatabaseInstance( dbGroupNameSc, @@ -193,25 +156,14 @@ class RDSTest { masterUsername, masterUserPassword, ) - println("The ARN of the new database is $dbARN") - - println("10. Wait for DB instance to be ready") waitForDbInstanceReady(dbInstanceIdentifierSc) - - println("11. Create a snapshot of the DB instance") createDbSnapshot(dbInstanceIdentifierSc, dbSnapshotIdentifierSc) - - println("12. Wait for DB snapshot to be ready") waitForSnapshotReady(dbInstanceIdentifierSc, dbSnapshotIdentifierSc) - - println("13. Delete the DB instance") deleteDbInstance(dbInstanceIdentifierSc) - - println("14. Delete the parameter group") if (dbARN != null) { deleteParaGroup(dbGroupNameSc, dbARN) } - println("The Scenario has successfully completed.") + logger.info("Test 8 passed.") } suspend fun getSecretValues(): String? { diff --git a/kotlin/services/rds/src/test/resources/logback.xml b/kotlin/services/rds/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/rds/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/redshift/README.md b/kotlin/services/redshift/README.md index 38ce245a996..7f87134bc62 100644 --- a/kotlin/services/redshift/README.md +++ b/kotlin/services/redshift/README.md @@ -36,7 +36,7 @@ Code excerpts that show you how to call individual service functions. - [CreateCluster](src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt#L56) - [DeleteCluster](src/main/kotlin/com/kotlin/redshift/DeleteCluster.kt#L38) - [DescribeClusters](src/main/kotlin/com/kotlin/redshift/DescribeClusters.kt#L22) -- [ModifyCluster](src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt#L112) +- [ModifyCluster](src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt#L113) @@ -83,4 +83,4 @@ in the `kotlin` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
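
Each module in this patch adds the same new 11-line src/test/resources/logback.xml, but the XML element tags have not survived here; only the encoder pattern line is visible. A plausible reconstruction, assuming a standard Logback console appender around that pattern (the element names and the root log level are assumptions, not text from the patch):

    <?xml version="1.0" encoding="UTF-8"?>
    <configuration>
        <appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
            <encoder>
                <pattern>%d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n</pattern>
            </encoder>
        </appender>
        <root level="info">
            <appender-ref ref="STDOUT"/>
        </root>
    </configuration>
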
-SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/kotlin/services/redshift/build.gradle.kts b/kotlin/services/redshift/build.gradle.kts index d3d4f953fad..92a2d915e89 100644 --- a/kotlin/services/redshift/build.gradle.kts +++ b/kotlin/services/redshift/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:redshift:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:redshift") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/redshift/src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt b/kotlin/services/redshift/src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt index 0804ae86cc3..248c2c691d9 100644 --- a/kotlin/services/redshift/src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt +++ b/kotlin/services/redshift/src/main/kotlin/com/kotlin/redshift/CreateAndModifyCluster.kt @@ -62,6 +62,7 @@ suspend fun createCluster( val clusterRequest = CreateClusterRequest { clusterIdentifier = clusterId + availabilityZone = "us-east-1a" masterUsername = masterUsernameVal masterUserPassword = masterUserPasswordVal nodeType = "ra3.4xlarge" diff --git a/kotlin/services/redshift/src/test/kotlin/RedshiftKotlinTest.kt b/kotlin/services/redshift/src/test/kotlin/RedshiftKotlinTest.kt index c5eb7d963ec..3034805b7ed 100644 --- a/kotlin/services/redshift/src/test/kotlin/RedshiftKotlinTest.kt +++ b/kotlin/services/redshift/src/test/kotlin/RedshiftKotlinTest.kt @@ -3,14 +3,9 @@ import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson -import com.kotlin.redshift.User import com.kotlin.redshift.createCluster -import com.kotlin.redshift.deleteRedshiftCluster import com.kotlin.redshift.describeRedshiftClusters import com.kotlin.redshift.findReservedNodeOffer -import com.kotlin.redshift.listRedShiftEvents -import com.kotlin.redshift.modifyCluster -import com.kotlin.redshift.waitForClusterReady import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.DisplayName @@ -20,6 +15,8 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random /** @@ -29,9 +26,11 @@ import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class RedshiftKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(RedshiftKotlinTest::class.java) private var clusterId = "" private var eventSourceType = "" - private var secretName = "" + private var 
username = "" + private var password = "" @BeforeAll fun setup() = @@ -44,84 +43,33 @@ class RedshiftKotlinTest { val json: String = getSecretValues().toString() val values = gson.fromJson(json, SecretValues::class.java) clusterId = values.clusterId + randomNum - secretName = values.secretName.toString() + username = values.userName.toString() + password = values.password.toString() eventSourceType = values.eventSourceType.toString() - -// Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. -/* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - clusterId = prop.getProperty("clusterId") - eventSourceType = prop.getProperty("eventSourceType") - secretName prop.getProperty("secretName") - */ } @Test @Order(1) fun createClusterTest() = runBlocking { - val gson = Gson() - val user = - gson.fromJson( - com.kotlin.redshift - .getSecretValues(secretName) - .toString(), - User::class.java, - ) - val username = user.username - val userPassword = user.password - createCluster(clusterId, username, userPassword) - println("Test 2 passed") + createCluster(clusterId, username, password) + logger.info("Test 1 passed") } @Test @Order(2) - fun waitForClusterReadyTest() = - runBlocking { - waitForClusterReady(clusterId) - println("Test 3 passed") - } - - @Test - @Order(3) - fun modifyClusterReadyTest() = - runBlocking { - modifyCluster(clusterId) - println("Test 4 passed") - } - - @Test - @Order(4) fun describeClustersTest() = runBlocking { describeRedshiftClusters() - println("Test 5 passed") + logger.info("Test 2 passed") } @Test - @Order(5) + @Order(3) fun findReservedNodeOfferTest() = runBlocking { findReservedNodeOffer() - println("Test 6 passed") - } - - @Test - @Order(6) - fun listEventsTest() = - runBlocking { - listRedShiftEvents(clusterId, eventSourceType) - println("Test 7 passed") - } - - @Test - @Order(7) - fun deleteClusterTest() = - runBlocking { - deleteRedshiftCluster(clusterId) - println("Test 8 passed") + logger.info("Test 3 passed") } suspend fun getSecretValues(): String? { @@ -142,6 +90,7 @@ class RedshiftKotlinTest { internal inner class SecretValues { val clusterId: String? = null val eventSourceType: String? = null - val secretName: String? = null + val userName: String? = null + val password: String? 
= null } } diff --git a/kotlin/services/redshift/src/test/resources/logback.xml b/kotlin/services/redshift/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/redshift/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/rekognition/build.gradle.kts b/kotlin/services/rekognition/build.gradle.kts index 621ba57b5af..fb3c6dc7d0b 100644 --- a/kotlin/services/rekognition/build.gradle.kts +++ b/kotlin/services/rekognition/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:rekognition:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:rekognition") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectFaces.kt b/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectFaces.kt index 68073d965da..b730ceb46a3 100644 --- a/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectFaces.kt +++ b/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectFaces.kt @@ -106,7 +106,7 @@ suspend fun getFaceResults() { while (!finished) { response = rekClient.getFaceDetection(recognitionRequest) status = response.jobStatus.toString() - if (status.compareTo("SUCCEEDED") == 0) { + if (status.compareTo("Succeeded") == 0) { finished = true } else { println("$yy status is: $status") diff --git a/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectInappropriate.kt b/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectInappropriate.kt index 1d86649ab7d..e3f3c6096a7 100644 --- a/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectInappropriate.kt +++ b/kotlin/services/rekognition/src/main/kotlin/com/kotlin/rekognition/VideoDetectInappropriate.kt @@ -104,7 +104,7 @@ suspend fun getModResults() { while (!finished) { modDetectionResponse = rekClient.getContentModeration(modRequest) status = modDetectionResponse.jobStatus.toString() - if (status.compareTo("SUCCEEDED") == 0) { + if (status.compareTo("Succeeded") == 0) { finished = true } else { println("$yy status is: $status") diff --git a/kotlin/services/rekognition/src/test/kotlin/RekognitionTest.kt b/kotlin/services/rekognition/src/test/kotlin/RekognitionTest.kt index cab8f00053d..f3af93b966d 100644 --- a/kotlin/services/rekognition/src/test/kotlin/RekognitionTest.kt +++ b/kotlin/services/rekognition/src/test/kotlin/RekognitionTest.kt @@ -1,25 +1,16 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
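
The two Rekognition hunks above fix the polling loops so the string comparison matches what jobStatus.toString() actually produces ("Succeeded" rather than "SUCCEEDED"). Comparing against the modeled VideoJobStatus type avoids depending on toString at all; a sketch, under the assumption that the response models jobStatus as that type:

    import aws.sdk.kotlin.services.rekognition.RekognitionClient
    import aws.sdk.kotlin.services.rekognition.model.GetFaceDetectionRequest
    import aws.sdk.kotlin.services.rekognition.model.VideoJobStatus
    import kotlinx.coroutines.delay

    suspend fun waitForFaceJob(rekClient: RekognitionClient, jobId: String) {
        val request = GetFaceDetectionRequest { this.jobId = jobId }
        while (true) {
            val response = rekClient.getFaceDetection(request)
            // Compare against the modeled status type rather than a raw string.
            if (response.jobStatus == VideoJobStatus.Succeeded) break
            println("Status is ${response.jobStatus}")
            delay(10_000)
        }
    }
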
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.rekognition.model.NotificationChannel import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson -import com.kotlin.rekognition.addToCollection -import com.kotlin.rekognition.compareTwoFaces import com.kotlin.rekognition.createMyCollection import com.kotlin.rekognition.describeColl -import com.kotlin.rekognition.detectFacesinImage -import com.kotlin.rekognition.detectImageLabels -import com.kotlin.rekognition.detectModLabels -import com.kotlin.rekognition.detectTextLabels -import com.kotlin.rekognition.displayGear import com.kotlin.rekognition.getCelebrityInfo import com.kotlin.rekognition.getFaceResults import com.kotlin.rekognition.getModResults import com.kotlin.rekognition.listAllCollections -import com.kotlin.rekognition.recognizeAllCelebrities import com.kotlin.rekognition.startFaceDetection import com.kotlin.rekognition.startModerationDetection import kotlinx.coroutines.runBlocking @@ -31,11 +22,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class RekognitionTest { + private val logger: Logger = LoggerFactory.getLogger(RekognitionTest::class.java) private var channel: NotificationChannel? = null private var facesImage = "" private var celebritiesImage = "" @@ -77,152 +71,64 @@ class RekognitionTest { modVid = values.modVid.toString() textVid = values.textVid.toString() celVid = values.celVid.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - - // Populate the data members required for all tests. 
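
The commented-out blocks being removed here and in the other test classes were the config.properties fallback; the tests now rely only on the Secrets Manager lookup they already performed. That lookup pattern, condensed (the secret name and the value class below are illustrative):

    import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient
    import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest
    import com.google.gson.Gson

    // Illustrative shape of the JSON stored in the secret.
    internal class ExampleSecretValues {
        val roleArn: String? = null
        val bucketName: String? = null
    }

    suspend fun loadTestConfig(): ExampleSecretValues {
        val request = GetSecretValueRequest {
            secretId = "test/example"   // illustrative secret name
        }
        // No explicit credentialsProvider: the client uses the default credential chain.
        SecretsManagerClient { region = "us-east-1" }.use { client ->
            val json = client.getSecretValue(request).secretString
            return Gson().fromJson(json, ExampleSecretValues::class.java)
        }
    }
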
- facesImage = prop.getProperty("facesImage") - celebritiesImage = prop.getProperty("celebritiesImage") - faceImage2 = prop.getProperty("faceImage2") - celId = prop.getProperty("celId") - moutainImage = prop.getProperty("moutainImage") - collectionName = prop.getProperty("collectionName") - ppeImage = prop.getProperty("ppeImage") - bucketName = prop.getProperty("bucketName") - textImage = prop.getProperty("textImage") - modImage = prop.getProperty("modImage") - faceVid = prop.getProperty("faceVid") - topicArn = prop.getProperty("topicArn") - roleArn = prop.getProperty("roleArn") - modVid = prop.getProperty("modVid") - textVid = prop.getProperty("textVid") - celVid = prop.getProperty("celVid") - */ } @Test @Order(1) - fun detectFacesTest() = - runBlocking { - detectFacesinImage(facesImage) - println("Test 1 passed") - } - - @Test - @Order(2) - fun recognizeCelebritiesTest() = - runBlocking { - recognizeAllCelebrities(celebritiesImage) - println("Test 2 passed") - } - - @Test - @Order(3) - fun compareFacesTest() = - runBlocking { - compareTwoFaces(70f, facesImage, faceImage2) - println("Test 3 passed") - } - - @Test - @Order(4) fun celebrityInfoTest() = runBlocking { getCelebrityInfo(celId) - println("Test 4 passed") + logger.info("Test 1 passed") } @Test - @Order(5) - fun detectLabelsTest() = - runBlocking { - detectImageLabels(moutainImage) - println("Test 5 passed") - } - - @Test - @Order(6) + @Order(2) fun createCollectionTest() = runBlocking { createMyCollection(collectionName) - println("Test 6 passed") - } - - @Test - @Order(7) - fun addFacesToCollectionTest() = - runBlocking { - addToCollection(collectionName, facesImage) - println("Test 7 passed") + logger.info("Test 2 passed") } @Test - @Order(8) + @Order(3) fun listFacesCollectionTest() = runBlocking { listAllCollections() - println("Test 8 passed") + logger.info("Test 3 passed") } @Test - @Order(9) + @Order(4) fun listCollectionsTest() = runBlocking { listAllCollections() - println("Test 9 passed") + logger.info("Test 4 passed") } @Test - @Order(10) + @Order(5) fun describeCollectionTest() = runBlocking { describeColl(collectionName) - println("Test 10 passed") + logger.info("Test 5 passed") } @Test - @Order(11) - fun detectPPETest() = - runBlocking { - displayGear(ppeImage) - println("Test 11 passed") - } - - @Test - @Order(12) - fun detectTextTest() = - runBlocking { - detectTextLabels(textImage) - println("Test 12 passed") - } - - @Test - @Order(13) - fun detectModerationLabelsTest() = - runBlocking { - detectModLabels(modImage) - println("Test 13 passed") - } - - @Test - @Order(14) + @Order(6) fun videoDetectFacesTest() = runBlocking { startFaceDetection(channel, bucketName, celVid) getFaceResults() - println("Test 14 passed") + logger.info("Test 6 passed") } @Test - @Order(15) + @Order(7) fun videoDetectInappropriateTest() = runBlocking { startModerationDetection(channel, bucketName, modVid) getModResults() - println("Test 15 passed") + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { @@ -233,7 +139,6 @@ class RekognitionTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/rekognition/src/test/resources/logback.xml b/kotlin/services/rekognition/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ 
b/kotlin/services/rekognition/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/route53/build.gradle.kts b/kotlin/services/route53/build.gradle.kts index 51f7ca26de0..0c37366cdc5 100644 --- a/kotlin/services/route53/build.gradle.kts +++ b/kotlin/services/route53/build.gradle.kts @@ -27,14 +27,17 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:route53:1.2.28") - implementation("aws.sdk.kotlin:route53domains:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:route53") + implementation("aws.sdk.kotlin:route53domains") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { diff --git a/kotlin/services/route53/src/test/kotlin/Route53Test.kt b/kotlin/services/route53/src/test/kotlin/Route53Test.kt index b0afaae98f6..ecdf052ed50 100644 --- a/kotlin/services/route53/src/test/kotlin/Route53Test.kt +++ b/kotlin/services/route53/src/test/kotlin/Route53Test.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -30,11 +29,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class Route53Test { - val dash: String? 
= String(CharArray(80)).replace("\u0000", "-") + private val logger: Logger = LoggerFactory.getLogger(Route53Test::class.java) private var domainName = "" private var healthCheckId = "" private var hostedZoneId = "" @@ -61,20 +62,6 @@ class Route53Test { firstNameSc = values.firstNameSc.toString() lastNameSc = values.lastNameSc.toString() citySc = values.citySc.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - domainName = prop.getProperty("domainName") - domainSuggestionSc = prop.getProperty("domainSuggestionSc") - domainTypeSc = prop.getProperty("domainTypeSc") - phoneNumerSc = prop.getProperty("phoneNumerSc") - emailSc = prop.getProperty("emailSc") - firstNameSc = prop.getProperty("firstNameSc") - lastNameSc = prop.getProperty("lastNameSc") - citySc = prop.getProperty("citySc") - */ } @Test @@ -83,8 +70,7 @@ class Route53Test { runBlocking { healthCheckId = createCheck(domainName).toString() Assertions.assertFalse(healthCheckId.isEmpty()) - println("The health check id is $healthCheckId") - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -93,8 +79,7 @@ class Route53Test { runBlocking { hostedZoneId = createZone(domainName).toString() Assertions.assertFalse(hostedZoneId.isEmpty()) - println("The hosted zone id is $hostedZoneId") - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -102,7 +87,7 @@ class Route53Test { fun listHealthChecks() = runBlocking { listAllHealthChecks() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -110,7 +95,7 @@ class Route53Test { fun updateHealthCheck() = runBlocking { updateSpecificHealthCheck(healthCheckId) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -118,7 +103,7 @@ class Route53Test { fun listHostedZones() = runBlocking { listZones() - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -126,59 +111,24 @@ class Route53Test { fun deleteHealthCheck() = runBlocking { delHealthCheck(healthCheckId) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @Order(7) fun fullScenarioTest() = runBlocking { - println(dash) - println("1. List current domains.") listDomains() - println(dash) - - println(dash) - println("2. List operations in the past year.") listOperations() - println(dash) - - println(dash) - println("3. View billing for the account in the past year.") listBillingRecords() - println(dash) - - println(dash) - println("4. View prices for domain types.") listAllPrices(domainTypeSc) - println(dash) - - println(dash) - println("5. Get domain suggestions.") listDomainSuggestions(domainSuggestionSc) - println(dash) - - println(dash) - println("6. Check domain availability.") checkDomainAvailability(domainSuggestionSc) - println(dash) - - println(dash) - println("7. Check domain transferability.") checkDomainTransferability(domainSuggestionSc) - println(dash) - - println(dash) - println("8. Request a domain registration.") val opId = requestDomainRegistration(domainSuggestionSc, phoneNumerSc, emailSc, firstNameSc, lastNameSc, citySc) opId?.let { Assertions.assertFalse(it.isEmpty()) } - println(dash) - - println(dash) - println("9. 
Get operation details.") getOperationalDetail(opId) - println(dash) - println("Test 7 passed") + logger.info("Test 7 passed") } private suspend fun getSecretValues(): String { @@ -189,7 +139,6 @@ class Route53Test { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/route53/src/test/resources/logback.xml b/kotlin/services/route53/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/route53/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/s3/build.gradle.kts b/kotlin/services/s3/build.gradle.kts index 2de9daf041a..eee05ec6460 100644 --- a/kotlin/services/s3/build.gradle.kts +++ b/kotlin/services/s3/build.gradle.kts @@ -27,7 +27,7 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") -val kotlinSdkVersion = "1.0.41" +val kotlinSdkVersion = "1.3.112" val smithyKotlinVersion = "1.0.10" dependencies { implementation("aws.sdk.kotlin:s3:$kotlinSdkVersion") diff --git a/kotlin/services/s3/src/main/kotlin/com/kotlin/s3/MrapExample.kt b/kotlin/services/s3/src/main/kotlin/com/kotlin/s3/MrapExample.kt index f432f64b523..0714df6f7d4 100644 --- a/kotlin/services/s3/src/main/kotlin/com/kotlin/s3/MrapExample.kt +++ b/kotlin/services/s3/src/main/kotlin/com/kotlin/s3/MrapExample.kt @@ -27,7 +27,7 @@ import aws.sdk.kotlin.services.s3control.model.Region import aws.sdk.kotlin.services.sts.StsClient import aws.sdk.kotlin.services.sts.getCallerIdentity import aws.sdk.kotlin.services.sts.model.GetCallerIdentityRequest -import aws.smithy.kotlin.runtime.auth.awssigning.crt.CrtAwsSigner +import aws.smithy.kotlin.runtime.auth.awssigning.DefaultAwsSigner import aws.smithy.kotlin.runtime.content.ByteStream import aws.smithy.kotlin.runtime.content.decodeToString import aws.smithy.kotlin.runtime.http.auth.SigV4AsymmetricAuthScheme @@ -204,10 +204,10 @@ class MrapExample { companion object { // snippet-start:[s3.kotlin.mrap.create-s3client] suspend fun createS3Client(): S3Client { - // Configure your S3Client to use the Asymmetric Sigv4 (Sigv4a) signing algorithm. - val sigV4AScheme = SigV4AsymmetricAuthScheme(CrtAwsSigner) + // Configure your S3Client to use the Asymmetric SigV4 (SigV4a) signing algorithm. 
+ val sigV4aScheme = SigV4AsymmetricAuthScheme(DefaultAwsSigner) val s3 = S3Client.fromEnvironment { - authSchemes = listOf(sigV4AScheme) + authSchemes = listOf(sigV4aScheme) } return s3 } diff --git a/kotlin/services/s3/src/test/resources/logback.xml b/kotlin/services/s3/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/s3/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/sagemaker/build.gradle.kts b/kotlin/services/sagemaker/build.gradle.kts index b0a9cd8fc24..4368450e40e 100644 --- a/kotlin/services/sagemaker/build.gradle.kts +++ b/kotlin/services/sagemaker/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:sagemaker:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:sagemaker") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/sagemaker/src/test/kotlin/SageMakerTest.kt b/kotlin/services/sagemaker/src/test/kotlin/SageMakerTest.kt index bfcf358e2f2..379f90f3616 100644 --- a/kotlin/services/sagemaker/src/test/kotlin/SageMakerTest.kt +++ b/kotlin/services/sagemaker/src/test/kotlin/SageMakerTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -22,11 +21,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class SageMakerTest { + private val logger: Logger = LoggerFactory.getLogger(SageMakerTest::class.java) private var image = "" private var modelDataUrl = "" private var executionRoleArn = "" @@ -57,28 +59,6 @@ class SageMakerTest { s3OutputPath = values.s3OutputPath.toString() channelName = values.channelName.toString() trainingImage = values.trainingImage.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. 
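
Back in the MrapExample hunk, the change swaps CrtAwsSigner for DefaultAwsSigner when enabling asymmetric SigV4 (SigV4a) signing for Multi-Region Access Point requests. The resulting client setup, sketched in isolation from that file:

    import aws.sdk.kotlin.services.s3.S3Client
    import aws.smithy.kotlin.runtime.auth.awssigning.DefaultAwsSigner
    import aws.smithy.kotlin.runtime.http.auth.SigV4AsymmetricAuthScheme

    // Build an S3 client that signs requests with SigV4a, as required for
    // Multi-Region Access Points, using the default (non-CRT) signer.
    suspend fun createSigV4aS3Client(): S3Client {
        val sigV4aScheme = SigV4AsymmetricAuthScheme(DefaultAwsSigner)
        return S3Client.fromEnvironment {
            authSchemes = listOf(sigV4aScheme)
        }
    }
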
- - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - image = prop.getProperty("image") - modelDataUrl = prop.getProperty("modelDataUrl") - executionRoleArn = prop.getProperty("executionRoleArn") - modelName = prop.getProperty("modelName") - s3UriData = prop.getProperty("s3UriData") - s3Uri = prop.getProperty("s3Uri") - roleArn = prop.getProperty("roleArn") - trainingJobName = prop.getProperty("trainingJobName") - s3OutputPath = prop.getProperty("s3OutputPath") - channelName = prop.getProperty("channelName") - trainingImage = prop.getProperty("trainingImage") - s3UriTransform = prop.getProperty("s3UriTransform") - s3OutputPathTransform = prop.getProperty("s3OutputPathTransform") - transformJobName = prop.getProperty("transformJobName") - */ } @Test @@ -86,7 +66,7 @@ class SageMakerTest { fun createModelTest() = runBlocking { createSagemakerModel(modelDataUrl, image, modelName, executionRoleArn) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -94,7 +74,7 @@ class SageMakerTest { fun createTrainingJobTest() = runBlocking { trainJob(s3UriData, s3Uri, trainingJobName, roleArn, s3OutputPath, channelName, trainingImage) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -102,7 +82,7 @@ class SageMakerTest { fun describeTrainingJobTest() = runBlocking { describeTrainJob(trainingJobName) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -110,7 +90,7 @@ class SageMakerTest { fun listModelsTest() = runBlocking { listAllModels() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -118,7 +98,7 @@ class SageMakerTest { fun listNotebooksTest() = runBlocking { listBooks() - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -126,7 +106,7 @@ class SageMakerTest { fun listAlgorithmsTest() = runBlocking { listAlgs() - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -134,7 +114,7 @@ class SageMakerTest { fun listTrainingJobsTest() = runBlocking { listJobs() - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -142,7 +122,7 @@ class SageMakerTest { fun deleteModelTest() = runBlocking { deleteSagemakerModel(modelName) - println("Test 8 passed") + logger.info("Test 8 passed") } private suspend fun getSecretValues(): String { @@ -154,7 +134,6 @@ class SageMakerTest { SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/sagemaker/src/test/resources/logback.xml b/kotlin/services/sagemaker/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/sagemaker/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/secrets-manager/build.gradle.kts b/kotlin/services/secrets-manager/build.gradle.kts index 099e81082d1..c2348b2d40c 100644 --- a/kotlin/services/secrets-manager/build.gradle.kts +++ b/kotlin/services/secrets-manager/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - 
implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { diff --git a/kotlin/services/secrets-manager/src/test/kotlin/SecretsManagerKotlinTest.kt b/kotlin/services/secrets-manager/src/test/kotlin/SecretsManagerKotlinTest.kt index a3744bdec7d..71eb563cfd8 100644 --- a/kotlin/services/secrets-manager/src/test/kotlin/SecretsManagerKotlinTest.kt +++ b/kotlin/services/secrets-manager/src/test/kotlin/SecretsManagerKotlinTest.kt @@ -7,10 +7,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class SecretsManagerKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(SecretsManagerKotlinTest::class.java) private var secretName = "mysecret" @Test @@ -18,6 +21,6 @@ class SecretsManagerKotlinTest { fun getSecretValue() = runBlocking { getValue(secretName) - println("Test 1 passed") + logger.info("Test 1 passed") } } diff --git a/kotlin/services/secrets-manager/src/test/resources/logback.xml b/kotlin/services/secrets-manager/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/secrets-manager/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/ses/build.gradle.kts b/kotlin/services/ses/build.gradle.kts index c56fba2b209..33916c51ec8 100644 --- a/kotlin/services/ses/build.gradle.kts +++ b/kotlin/services/ses/build.gradle.kts @@ -27,16 +27,19 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:ses:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:ses") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") implementation("javax.mail:javax.mail-api:1.6.2") implementation("com.sun.mail:javax.mail:1.6.2") implementation("javax.activation:activation:1.1.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { diff --git a/kotlin/services/ses/src/test/kotlin/SESTest.kt b/kotlin/services/ses/src/test/kotlin/SESTest.kt index 
f7e935dac7f..7ef323aadb7 100644 --- a/kotlin/services/ses/src/test/kotlin/SESTest.kt +++ b/kotlin/services/ses/src/test/kotlin/SESTest.kt @@ -1,13 +1,11 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson import com.kotlin.ses.listSESIdentities import com.kotlin.ses.send -import com.kotlin.ses.sendemailAttachment import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.DisplayName @@ -17,21 +15,18 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class SESTest { + private val logger: Logger = LoggerFactory.getLogger(SESTest::class.java) private var sender = "" private var recipient = "" private var subject = "" private var fileLocation = "" - private val bodyText = - """ - Hello, - Please see the attached file for a list of customers to contact. - """.trimIndent() - private val bodyHTML = """ @@ -53,18 +48,6 @@ class SESTest { recipient = values.recipient.toString() subject = values.subject.toString() fileLocation = values.fileLocation.toString() - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - sender = prop.getProperty("sender") - recipient = prop.getProperty("recipient") - subject = prop.getProperty("subject") - fileLocation = prop.getProperty("fileLocation") - */ } @Test @@ -72,23 +55,15 @@ class SESTest { fun sendMessageTest() = runBlocking { send(sender, recipient, subject, bodyHTML) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @Order(2) - fun sendMessageAttTest() = - runBlocking { - sendemailAttachment(sender, recipient, subject, bodyText, bodyHTML, fileLocation) - println("Test 2 passed") - } - - @Test - @Order(3) fun listIdentitiesTest() = runBlocking { listSESIdentities() - println("Test 3 passed") + logger.info("Test 2 passed") } private suspend fun getSecretValues(): String { @@ -99,7 +74,6 @@ class SESTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/ses/src/test/resources/logback.xml b/kotlin/services/ses/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/ses/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/sns/build.gradle.kts b/kotlin/services/sns/build.gradle.kts index 3e83260c257..87ea3e709f8 100644 --- a/kotlin/services/sns/build.gradle.kts +++ b/kotlin/services/sns/build.gradle.kts @@ -29,13 +29,16 @@ repositories { apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - 
implementation("aws.sdk.kotlin:sns:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:sns") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/sns/src/test/kotlin/SNSTest.kt b/kotlin/services/sns/src/test/kotlin/SNSTest.kt index 20e1097d20b..a1d0a68b00a 100644 --- a/kotlin/services/sns/src/test/kotlin/SNSTest.kt +++ b/kotlin/services/sns/src/test/kotlin/SNSTest.kt @@ -1,6 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider + import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -26,11 +26,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class SNSTest { + private val logger: Logger = LoggerFactory.getLogger(SNSTest::class.java) private var topicName = "" private var topicArn = "" // This value is dynamically set private var subArn = "" // This value is dynamically set @@ -57,21 +60,6 @@ class SNSTest { lambdaarn = values.lambdaarn.toString() phone = values.phone.toString() message = values.message.toString() - - /* - // load the properties file. 
- val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - topicName = prop.getProperty("topicName") - attributeName = prop.getProperty("attributeName") - attributeValue = prop.getProperty("attributeValue") - email = prop.getProperty("email") - lambdaarn = prop.getProperty("lambdaarn") - phone = prop.getProperty("phone") - message = prop.getProperty("message") - existingsubarn = prop.getProperty("existingsubarn") - */ } @Test @@ -80,7 +68,7 @@ class SNSTest { runBlocking { topicArn = createSNSTopic(topicName) Assertions.assertTrue(!topicArn.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -88,7 +76,7 @@ class SNSTest { fun listTopicsTest() = runBlocking { listSNSTopics() - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -96,7 +84,7 @@ class SNSTest { fun setTopicAttributesTest() = runBlocking { setTopAttr(attributeName, topicArn, attributeValue) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -104,7 +92,7 @@ class SNSTest { fun subscribeEmailTest() = runBlocking { subEmail(topicArn, email) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -112,7 +100,7 @@ class SNSTest { fun subscribeLambdaTest() = runBlocking { subLambda(topicArn, lambdaarn) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -120,7 +108,7 @@ class SNSTest { fun addTagsTest() = runBlocking { addTopicTags(topicArn) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -128,7 +116,7 @@ class SNSTest { fun listTagsTest() = runBlocking { listTopicTags(topicArn) - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -136,7 +124,7 @@ class SNSTest { fun deleteTagTest() = runBlocking { removeTag(topicArn, "Team") - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -144,7 +132,7 @@ class SNSTest { fun subEmailTest() = runBlocking { subEmail(topicArn, email) - println("Test 10 passed") + logger.info("Test 10 passed") } @Test @@ -152,7 +140,7 @@ class SNSTest { fun pubTopicTest() = runBlocking { pubTopic(topicArn, message) - println("Test 11 passed") + logger.info("Test 11 passed") } @Test @@ -160,7 +148,7 @@ class SNSTest { fun listSubsTest() = runBlocking { listSNSSubscriptions() - println("Test 12 passed") + logger.info("Test 12 passed") } @Test @@ -168,7 +156,7 @@ class SNSTest { fun subscribeTextSMSTest() = runBlocking { subTextSNS(topicArn, phone) - println("Test 14 passed") + logger.info("Test 14 passed") } @Test @@ -176,7 +164,7 @@ class SNSTest { fun deleteTopicTest() = runBlocking { deleteSNSTopic(topicArn) - println("Test 15 passed") + logger.info("Test 15 passed") } private suspend fun getSecretValues(): String { @@ -187,7 +175,6 @@ class SNSTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/sns/src/test/resources/logback.xml b/kotlin/services/sns/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/sns/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/sqs/build.gradle.kts b/kotlin/services/sqs/build.gradle.kts index 0ff57c2cb94..3c86901ac38 100644 --- 
a/kotlin/services/sqs/build.gradle.kts +++ b/kotlin/services/sqs/build.gradle.kts @@ -27,16 +27,19 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:sqs:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:sqs") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") implementation(kotlin("reflect")) implementation("com.fasterxml.jackson.core:jackson-core:2.14.2") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/sqs/src/test/kotlin/SQSTest.kt b/kotlin/services/sqs/src/test/kotlin/SQSTest.kt index 4df34865a49..7a4ec1d2d3d 100644 --- a/kotlin/services/sqs/src/test/kotlin/SQSTest.kt +++ b/kotlin/services/sqs/src/test/kotlin/SQSTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -24,11 +23,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class SQSTest { + private val logger: Logger = LoggerFactory.getLogger(SQSTest::class.java) private var queueName = "" private var message = "" private var queueUrl = "" @@ -45,16 +47,6 @@ class SQSTest { val queueMessage = gson.fromJson(json, QueueMessage::class.java) queueName = queueMessage.queueName.toString() + randomNum message = queueMessage.message.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. 
- prop.load(input) - queueName = prop.getProperty("QueueName") - message = prop.getProperty("Message") - */ } @Test @@ -63,7 +55,7 @@ class SQSTest { runBlocking { queueUrl = createQueue(queueName) Assertions.assertTrue(!queueUrl.isEmpty()) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -71,7 +63,7 @@ class SQSTest { fun sendMessageTest() = runBlocking { sendMessages(queueUrl, message) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -79,7 +71,7 @@ class SQSTest { fun sendBatchMessagesTest() = runBlocking { sendBatchMessages(queueUrl) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -87,7 +79,7 @@ class SQSTest { fun getMessageTest() = runBlocking { receiveMessages(queueUrl) - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -95,7 +87,7 @@ class SQSTest { fun addQueueTagsTest() = runBlocking { addTags(queueName) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -103,7 +95,7 @@ class SQSTest { fun listQueueTagsTest() = runBlocking { listTags(queueName) - println("Test 6 passed") + logger.info("Test 6 passed") } @Test @@ -111,7 +103,7 @@ class SQSTest { fun removeQueueTagsTest() = runBlocking { removeTag(queueName, "Test") - println("Test 7 passed") + logger.info("Test 7 passed") } @Test @@ -119,7 +111,7 @@ class SQSTest { fun deleteMessagesTest() = runBlocking { deleteMessages(queueUrl) - println("Test 8 passed") + logger.info("Test 8 passed") } @Test @@ -127,7 +119,7 @@ class SQSTest { fun deleteQueueTest() = runBlocking { deleteQueue(queueUrl) - println("Test 9 passed") + logger.info("Test 9 passed") } private suspend fun getSecretValues(): String { @@ -138,7 +130,6 @@ class SQSTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/sqs/src/test/resources/logback.xml b/kotlin/services/sqs/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/sqs/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/stepfunctions/build.gradle.kts b/kotlin/services/stepfunctions/build.gradle.kts index ff7e17570b5..43a8b29d8b2 100644 --- a/kotlin/services/stepfunctions/build.gradle.kts +++ b/kotlin/services/stepfunctions/build.gradle.kts @@ -27,16 +27,19 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:sfn:1.2.28") - implementation("aws.sdk.kotlin:iam:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:sfn") + implementation("aws.sdk.kotlin:iam") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") implementation("com.googlecode.json-simple:json-simple:1.1.1") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("com.fasterxml.jackson.core:jackson-databind:2.14.2") 
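// Minimal shape of the logging setup these test classes now share: an SLF4J logger backed by the
// added slf4j-simple dependency or the module's test logback.xml, replacing println. Class and
// method names here are placeholders.
import kotlinx.coroutines.runBlocking
import org.junit.jupiter.api.Test
import org.slf4j.Logger
import org.slf4j.LoggerFactory

class ExampleServiceTest {
    private val logger: Logger = LoggerFactory.getLogger(ExampleServiceTest::class.java)

    @Test
    fun exampleOperationTest() = runBlocking {
        // Call the service operation under test here, then log the outcome.
        logger.info("Test 1 passed")
    }
}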
implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/GetStream.kt b/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/GetStream.kt index 2ae11749b11..d365e9fb6c6 100644 --- a/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/GetStream.kt +++ b/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/GetStream.kt @@ -4,12 +4,13 @@ package com.kotlin.stepfunctions import com.fasterxml.jackson.databind.JsonNode import com.fasterxml.jackson.databind.ObjectMapper +import java.io.FileInputStream import java.io.InputStream class GetStream { - suspend fun getStream(): String { + suspend fun getStream(location: String): String { // Get JSON to use for the state machine and place the activityArn value into it. - val input: InputStream = this::class.java.classLoader.getResourceAsStream("chat_sfn_state_machine.json") + val input: InputStream = FileInputStream(location) val mapper = ObjectMapper() val jsonNode: JsonNode = mapper.readValue(input, JsonNode::class.java) return mapper.writeValueAsString(jsonNode) diff --git a/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/StepFunctionsScenario.kt b/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/StepFunctionsScenario.kt index ed356ab8a01..c666510d832 100644 --- a/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/StepFunctionsScenario.kt +++ b/kotlin/services/stepfunctions/src/main/kotlin/com/kotlin/stepfunctions/StepFunctionsScenario.kt @@ -67,9 +67,10 @@ suspend fun main(args: Array) { roleName - The name of the IAM role to create for this state machine. activityName - The name of an activity to create. stateMachineName - The name of the state machine to create. + jsonFile - The location of the chat_sfn_state_machine.json file. You can located it in resources/sample_files. """ - if (args.size != 3) { + if (args.size != 4) { println(usage) exitProcess(0) } @@ -77,6 +78,7 @@ suspend fun main(args: Array) { val roleName = args[0] val activityName = args[1] val stateMachineName = args[2] + val jsonFile = args[3] val sc = Scanner(System.`in`) var action = false @@ -116,7 +118,7 @@ suspend fun main(args: Array) { // Get JSON to use for the state machine and place the activityArn value into it. val stream = GetStream() - val jsonString = stream.getStream() + val jsonString = stream.getStream(jsonFile) // Modify the Resource node. val objectMapper = ObjectMapper() @@ -258,14 +260,14 @@ suspend fun describeExe(executionArnVal: String?) 
{ SfnClient { region = "us-east-1" }.use { sfnClient -> val response = sfnClient.describeExecution(executionRequest) status = response.status.toString() - if (status.compareTo("RUNNING") == 0) { + if (status.compareTo("Running") == 0) { println("The state machine is still running, let's wait for it to finish.") Thread.sleep(2000) - } else if (status.compareTo("SUCCEEDED") == 0) { + } else if (status.compareTo("Succeeded") == 0) { println("The Step Function workflow has succeeded") hasSucceeded = true } else { - println("The Status is neither running or succeeded") + println("The Status is $status") } } } diff --git a/kotlin/services/stepfunctions/src/test/kotlin/StepFunctionsKotlinTest.kt b/kotlin/services/stepfunctions/src/test/kotlin/StepFunctionsKotlinTest.kt index 961ac36994d..494e2bf64b1 100644 --- a/kotlin/services/stepfunctions/src/test/kotlin/StepFunctionsKotlinTest.kt +++ b/kotlin/services/stepfunctions/src/test/kotlin/StepFunctionsKotlinTest.kt @@ -1,28 +1,10 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest -import com.fasterxml.jackson.databind.JsonNode -import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.databind.node.ObjectNode import com.google.gson.Gson -import com.kotlin.stepfunctions.DASHES -import com.kotlin.stepfunctions.GetStream -import com.kotlin.stepfunctions.createActivity -import com.kotlin.stepfunctions.createIAMRole -import com.kotlin.stepfunctions.createMachine -import com.kotlin.stepfunctions.deleteActivity -import com.kotlin.stepfunctions.deleteMachine -import com.kotlin.stepfunctions.describeExe -import com.kotlin.stepfunctions.describeStateMachine -import com.kotlin.stepfunctions.getActivityTask -import com.kotlin.stepfunctions.listActivitesPagnator import com.kotlin.stepfunctions.listMachines -import com.kotlin.stepfunctions.listStatemachinesPagnator -import com.kotlin.stepfunctions.sendTaskSuccess -import com.kotlin.stepfunctions.startWorkflow import kotlinx.coroutines.runBlocking import org.junit.jupiter.api.BeforeAll import org.junit.jupiter.api.DisplayName @@ -32,12 +14,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder -import java.util.Scanner +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class StepFunctionsKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(StepFunctionsKotlinTest::class.java) private var roleNameSC = "" private var activityNameSC = "" private var stateMachineNameSC = "" @@ -54,128 +38,14 @@ class StepFunctionsKotlinTest { activityNameSC = values.activityNameSC.toString() + UUID.randomUUID() stateMachineNameSC = values.stateMachineNameSC.toString() + UUID.randomUUID() jsonFile = values.machineFile.toString() - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") as InputStream - val prop = Properties() - prop.load(input) - jsonFile = prop.getProperty("jsonFile") - jsonFileSM = prop.getProperty("jsonFileSM") - roleARN = prop.getProperty("roleARN") - stateMachineName = prop.getProperty("stateMachineName") - roleNameSC = 
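// A sketch of the describeExe polling above written against the generated ExecutionStatus type
// instead of comparing toString() output, so the check does not depend on how the sealed class
// renders itself as a string. Package and member names are assumptions based on the SDK's
// codegen conventions.
import aws.sdk.kotlin.services.sfn.SfnClient
import aws.sdk.kotlin.services.sfn.model.DescribeExecutionRequest
import aws.sdk.kotlin.services.sfn.model.ExecutionStatus
import kotlinx.coroutines.delay

suspend fun waitForExecution(executionArnVal: String): Boolean =
    SfnClient { region = "us-east-1" }.use { sfnClient ->
        var succeeded = false
        var finished = false
        while (!finished) {
            val response = sfnClient.describeExecution(
                DescribeExecutionRequest { executionArn = executionArnVal }
            )
            when (response.status) {
                ExecutionStatus.Running -> {
                    println("The state machine is still running, let's wait for it to finish.")
                    delay(2_000)
                }
                ExecutionStatus.Succeeded -> {
                    println("The Step Functions workflow has succeeded.")
                    succeeded = true
                    finished = true
                }
                else -> {
                    println("The execution ended with status ${response.status}.")
                    finished = true
                }
            }
        }
        succeeded
    }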
prop.getProperty("roleNameSC") - activityNameSC = prop.getProperty("activityNameSC") - stateMachineNameSC = prop.getProperty("stateMachineNameSC") - */ } @Test - @Order(2) + @Order(1) fun listStateMachines() = runBlocking { listMachines() - println("Test 4 passed") - } - - @Test - @Order(2) - fun testMVP() = - runBlocking { - val sc = Scanner(System.`in`) - var action = false - - val polJSON = """{ - "Version": "2012-10-17", - "Statement": [ - { - "Sid": "", - "Effect": "Allow", - "Principal": { - "Service": "states.amazonaws.com" - }, - "Action": "sts:AssumeRole" - } - ] - }""" - - println(DASHES) - println("List activities using a Paginator.") - listActivitesPagnator() - println("Create an activity.") - val activityArn = createActivity(activityNameSC) - println("The ARN of the Activity is $activityArn") - - println("List state machines using a paginator.") - listStatemachinesPagnator() - println(DASHES) - - // Get JSON to use for the state machine and place the activityArn value into it. - val stream = GetStream() - val jsonString = stream.getStream() - - // Modify the Resource node. - val objectMapper = ObjectMapper() - val root: JsonNode = objectMapper.readTree(jsonString) - (root.path("States").path("GetInput") as ObjectNode).put("Resource", activityArn) - - // Convert the modified Java object back to a JSON string. - val stateDefinition = objectMapper.writeValueAsString(root) - println(stateDefinition) - - println(DASHES) - println("Create a state machine.") - val roleARN = createIAMRole(roleNameSC, polJSON) - val stateMachineArn = createMachine(roleARN, stateMachineNameSC, stateDefinition) - println("The ARN of the state machine is $stateMachineArn") - println("The ARN of the state machine is") - println(DASHES) - - println(DASHES) - println("Describe the state machine.") - describeStateMachine(stateMachineArn) - println("What should ChatSFN call you?") - val userName = "foo" - println("Hello $userName") - println(DASHES) - - println(DASHES) - // The JSON to pass to the StartExecution call. 
- val executionJson = "{ \"name\" : \"$userName\" }" - println(executionJson) - println("Start execution of the state machine and interact with it.") - val runArn = startWorkflow(stateMachineArn, executionJson) - println("The ARN of the state machine execution is $runArn") - var myList: List - while (!action) { - myList = getActivityTask(activityArn) - println("ChatSFN: " + myList[1]) - println("$userName please specify a value.") - val myAction = "done" - action = true - val taskJson = "{ \"action\" : \"$myAction\" }" - println(taskJson) - sendTaskSuccess(myList[0], taskJson) - } - println(DASHES) - - println(DASHES) - println("Describe the execution.") - describeExe(runArn) - println(DASHES) - - println(DASHES) - println("Delete the activity.") - deleteActivity(activityArn) - println(DASHES) - - println(DASHES) - println("Delete the state machines.") - deleteMachine(stateMachineArn) - println(DASHES) - - println(DASHES) - println("The AWS Step Functions example scenario is complete.") - println(DASHES) - println("Test 4 passed") + logger.info("Test 1 passed") } private suspend fun getSecretValues(): String { @@ -186,7 +56,6 @@ class StepFunctionsKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/stepfunctions/src/test/resources/logback.xml b/kotlin/services/stepfunctions/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/stepfunctions/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/sts/build.gradle.kts b/kotlin/services/sts/build.gradle.kts index 91183182cc1..4cf2466aa67 100644 --- a/kotlin/services/sts/build.gradle.kts +++ b/kotlin/services/sts/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:sts:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:sts") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/sts/src/test/kotlin/Test2.kt b/kotlin/services/sts/src/test/kotlin/STSTest.kt similarity index 78% rename from kotlin/services/sts/src/test/kotlin/Test2.kt rename to kotlin/services/sts/src/test/kotlin/STSTest.kt index ed5247b9325..e7ed3c9f411 100644 --- a/kotlin/services/sts/src/test/kotlin/Test2.kt +++ b/kotlin/services/sts/src/test/kotlin/STSTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
// SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -18,10 +17,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) -class Test2 { +class STSTest { + private val logger: Logger = LoggerFactory.getLogger(STSTest::class.java) private var roleArn = "" private var accessKeyId = "" private var roleSessionName = "" @@ -36,49 +38,38 @@ class Test2 { roleArn = values.roleArn.toString() accessKeyId = values.accessKeyId.toString() roleSessionName = values.roleSessionName.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - - // Populate the data members required for all tests. - roleArn = prop.getProperty("roleArn") - accessKeyId = prop.getProperty("accessKeyId") - roleSessionName = prop.getProperty("roleSessionName") - */ } @Test - @Order(2) + @Order(1) fun assumeRoleTest() = runBlocking { assumeGivenRole(roleArn, roleSessionName) - println("Test 2 passed") + logger.info("Test 1 passed") } @Test - @Order(3) + @Order(2) fun getSessionTokenTest() = runBlocking { getToken() - println("Test 3 passed") + logger.info("Test 2 passed") } @Test - @Order(4) + @Order(3) fun getCallerIdentityTest() = runBlocking { getCallerId() - println("Test 4 passed") + logger.info("Test 3 passed") } @Test - @Order(5) + @Order(4) fun getAccessKeyInfoTest() = runBlocking { getKeyInfo(accessKeyId) - println("Test 5 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -89,7 +80,6 @@ class Test2 { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/sts/src/test/resources/logback.xml b/kotlin/services/sts/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/sts/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/support/build.gradle.kts b/kotlin/services/support/build.gradle.kts index 10255a89550..667dd03221c 100644 --- a/kotlin/services/support/build.gradle.kts +++ b/kotlin/services/support/build.gradle.kts @@ -27,12 +27,28 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:support:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") - testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:support") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") + implementation("com.google.code.gson:gson:2.10") + 
testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" } + +tasks.test { + useJUnitPlatform() + testLogging { + events("passed", "skipped", "failed") + } + + // Define the test source set + testClassesDirs += files("build/classes/kotlin/test") + classpath += files("build/classes/kotlin/main", "build/resources/main") +} diff --git a/kotlin/services/support/src/test/kotlin/SupportTest.kt b/kotlin/services/support/src/test/kotlin/SupportTest.kt index e0109fc5a12..cfc49aab35a 100644 --- a/kotlin/services/support/src/test/kotlin/SupportTest.kt +++ b/kotlin/services/support/src/test/kotlin/SupportTest.kt @@ -8,15 +8,19 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(MethodOrderer.OrderAnnotation::class) class SupportTest { + private val logger: Logger = LoggerFactory.getLogger(SupportTest::class.java) + @Test @Order(1) fun supportHelloScenario() = runBlocking { displaySomeServices() - println("\n AWS Support Hello Test passed") + logger.info("\n AWS Support Hello Test passed") } } diff --git a/kotlin/services/support/src/test/resources/logback.xml b/kotlin/services/support/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/support/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/textract/build.gradle.kts b/kotlin/services/textract/build.gradle.kts index 224dbb0ef99..99a750b840c 100644 --- a/kotlin/services/textract/build.gradle.kts +++ b/kotlin/services/textract/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:textract:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:textract") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/textract/src/main/kotlin/com/kotlin/textract/StartDocumentAnalysis.kt b/kotlin/services/textract/src/main/kotlin/com/kotlin/textract/StartDocumentAnalysis.kt index ee0a67506db..7a848fd9444 100644 --- a/kotlin/services/textract/src/main/kotlin/com/kotlin/textract/StartDocumentAnalysis.kt +++ b/kotlin/services/textract/src/main/kotlin/com/kotlin/textract/StartDocumentAnalysis.kt @@ -94,7 +94,7 @@ private suspend fun getJobResults( val response = textractClient.getDocumentAnalysis(analysisRequest) status = 
response.jobStatus.toString() - if (status.compareTo("SUCCEEDED") == 0) { + if (status.compareTo("Succeeded") == 0) { finished = true } else { println("$index status is: $status") diff --git a/kotlin/services/textract/src/test/kotlin/TextractTest.kt b/kotlin/services/textract/src/test/kotlin/TextractTest.kt index 6aea7cc73f9..4b139322fad 100644 --- a/kotlin/services/textract/src/test/kotlin/TextractTest.kt +++ b/kotlin/services/textract/src/test/kotlin/TextractTest.kt @@ -1,6 +1,5 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -17,10 +16,13 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class TextractTest { + private val logger: Logger = LoggerFactory.getLogger(TextractTest::class.java) private var sourceDoc = "" private var bucketName = "" private var docName = "" @@ -35,19 +37,6 @@ class TextractTest { sourceDoc = values.sourceDoc.toString() bucketName = values.bucketName.toString() docName = values.docName.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - - // load the properties file. - prop.load(input) - - // Populate the data members required for all tests - sourceDoc = prop.getProperty("sourceDoc") - bucketName = prop.getProperty("bucketName") - docName = prop.getProperty("docName") - */ } @Test @@ -55,7 +44,7 @@ class TextractTest { fun analyzeDocumentTest() = runBlocking { analyzeDoc(sourceDoc) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -63,7 +52,7 @@ class TextractTest { fun detectDocumentTextTest() = runBlocking { detectDocText(sourceDoc) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -71,7 +60,7 @@ class TextractTest { fun detectDocumentTextS3Test() = runBlocking { detectDocTextS3(bucketName, docName) - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -79,7 +68,7 @@ class TextractTest { fun startDocumentAnalysisTest() = runBlocking { startDocAnalysisS3(bucketName, docName) - println("Test 4 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -91,7 +80,6 @@ class TextractTest { SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/textract/src/test/resources/logback.xml b/kotlin/services/textract/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/textract/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/translate/build.gradle.kts b/kotlin/services/translate/build.gradle.kts index 2e03d8329ca..b6bc19b1548 100644 --- a/kotlin/services/translate/build.gradle.kts +++ b/kotlin/services/translate/build.gradle.kts 
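// The same idea applies to the Textract poller above (and to the Translate job-status check further
// down): compare the generated status type rather than its string form. Package and member names
// are assumptions based on the SDK's codegen conventions.
import aws.sdk.kotlin.services.textract.TextractClient
import aws.sdk.kotlin.services.textract.model.GetDocumentAnalysisRequest
import aws.sdk.kotlin.services.textract.model.JobStatus

suspend fun isAnalysisFinished(textractClient: TextractClient, jobIdVal: String): Boolean {
    val response = textractClient.getDocumentAnalysis(
        GetDocumentAnalysisRequest { jobId = jobIdVal }
    )
    return response.jobStatus == JobStatus.Succeeded
}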
@@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:translate:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:translate") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.2") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/translate/src/main/kotlin/com/kotlin/translate/BatchTranslation.kt b/kotlin/services/translate/src/main/kotlin/com/kotlin/translate/BatchTranslation.kt index 35d63a55f5c..5f974a9cb97 100644 --- a/kotlin/services/translate/src/main/kotlin/com/kotlin/translate/BatchTranslation.kt +++ b/kotlin/services/translate/src/main/kotlin/com/kotlin/translate/BatchTranslation.kt @@ -95,7 +95,7 @@ suspend fun translateDocuments( jobStatus = response.textTranslationJobProperties?.jobStatus.toString() println(jobStatus) - if (jobStatus.contains("COMPLETED")) { + if (jobStatus.contains("Completed")) { break } else { print(".") diff --git a/kotlin/services/translate/src/test/kotlin/TranslateKotlinTest.kt b/kotlin/services/translate/src/test/kotlin/TranslateKotlinTest.kt index d2b3afc869e..3db8628444a 100644 --- a/kotlin/services/translate/src/test/kotlin/TranslateKotlinTest.kt +++ b/kotlin/services/translate/src/test/kotlin/TranslateKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -19,11 +18,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.UUID @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class TranslateKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(TranslateKotlinTest::class.java) private var s3Uri = "" private var s3UriOut = "" private var jobName = "" @@ -41,18 +43,6 @@ class TranslateKotlinTest { s3UriOut = values.s3UriOut.toString() jobName = values.jobName.toString() + UUID.randomUUID() dataAccessRoleArn = values.dataAccessRoleArn.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - - // Populate the data members required for all tests. 
- s3Uri = prop.getProperty("s3Uri") - s3UriOut = prop.getProperty("s3UriOut") - jobName = prop.getProperty("jobName") - dataAccessRoleArn = prop.getProperty("dataAccessRoleArn") - */ } @Test @@ -60,7 +50,7 @@ class TranslateKotlinTest { fun translateTextTest() = runBlocking { textTranslate() - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -69,7 +59,7 @@ class TranslateKotlinTest { runBlocking { jobId = translateDocuments(s3Uri, s3UriOut, jobName, dataAccessRoleArn).toString() Assertions.assertTrue(!jobId.isEmpty()) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -77,7 +67,7 @@ class TranslateKotlinTest { fun listTextTranslationJobsTest() = runBlocking { getTranslationJobs() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -85,7 +75,7 @@ class TranslateKotlinTest { fun describeTextTranslationJobTest() = runBlocking { describeTranslationJob(jobId) - println("Test 4 passed") + logger.info("Test 4 passed") } private suspend fun getSecretValues(): String { @@ -96,7 +86,6 @@ class TranslateKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/translate/src/test/resources/logback.xml b/kotlin/services/translate/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/translate/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git a/kotlin/services/xray/build.gradle.kts b/kotlin/services/xray/build.gradle.kts index 980ebfb26b9..f7682a215ff 100644 --- a/kotlin/services/xray/build.gradle.kts +++ b/kotlin/services/xray/build.gradle.kts @@ -27,13 +27,16 @@ repositories { } apply(plugin = "org.jlleitschuh.gradle.ktlint") dependencies { - implementation("aws.sdk.kotlin:xray:1.2.28") - implementation("aws.sdk.kotlin:secretsmanager:1.2.28") - implementation("aws.smithy.kotlin:http-client-engine-okhttp:0.30.0") - implementation("aws.smithy.kotlin:http-client-engine-crt:0.30.0") + implementation(platform("aws.sdk.kotlin:bom:1.3.112")) + implementation("aws.sdk.kotlin:xray") + implementation("aws.sdk.kotlin:secretsmanager") + implementation("aws.smithy.kotlin:http-client-engine-okhttp") + implementation("aws.smithy.kotlin:http-client-engine-crt") implementation("com.google.code.gson:gson:2.10") testImplementation("org.junit.jupiter:junit-jupiter:5.9.0") implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core:1.6.4") + implementation("org.slf4j:slf4j-api:2.0.15") + implementation("org.slf4j:slf4j-simple:2.0.15") } tasks.withType { kotlinOptions.jvmTarget = "17" diff --git a/kotlin/services/xray/src/test/kotlin/TranslateKotlinTest.kt b/kotlin/services/xray/src/test/kotlin/TranslateKotlinTest.kt deleted file mode 100644 index d2b3afc869e..00000000000 --- a/kotlin/services/xray/src/test/kotlin/TranslateKotlinTest.kt +++ /dev/null @@ -1,114 +0,0 @@ -// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-// SPDX-License-Identifier: Apache-2.0 - -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider -import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient -import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest -import com.google.gson.Gson -import com.kotlin.translate.describeTranslationJob -import com.kotlin.translate.getTranslationJobs -import com.kotlin.translate.textTranslate -import com.kotlin.translate.translateDocuments -import kotlinx.coroutines.runBlocking -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.BeforeAll -import org.junit.jupiter.api.DisplayName -import org.junit.jupiter.api.MethodOrderer.OrderAnnotation -import org.junit.jupiter.api.Nested -import org.junit.jupiter.api.Order -import org.junit.jupiter.api.Test -import org.junit.jupiter.api.TestInstance -import org.junit.jupiter.api.TestMethodOrder -import java.util.UUID - -@TestInstance(TestInstance.Lifecycle.PER_CLASS) -@TestMethodOrder(OrderAnnotation::class) -class TranslateKotlinTest { - private var s3Uri = "" - private var s3UriOut = "" - private var jobName = "" - private var dataAccessRoleArn = "" - private var jobId = "" - - @BeforeAll - fun setup() = - runBlocking { - // Get the values to run these tests from AWS Secrets Manager. - val gson = Gson() - val json: String = getSecretValues() - val values = gson.fromJson(json, SecretValues::class.java) - s3Uri = values.s3Uri.toString() - s3UriOut = values.s3UriOut.toString() - jobName = values.jobName.toString() + UUID.randomUUID() - dataAccessRoleArn = values.dataAccessRoleArn.toString() - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - - // Populate the data members required for all tests. - s3Uri = prop.getProperty("s3Uri") - s3UriOut = prop.getProperty("s3UriOut") - jobName = prop.getProperty("jobName") - dataAccessRoleArn = prop.getProperty("dataAccessRoleArn") - */ - } - - @Test - @Order(1) - fun translateTextTest() = - runBlocking { - textTranslate() - println("Test 1 passed") - } - - @Test - @Order(2) - fun batchTranslationTest() = - runBlocking { - jobId = translateDocuments(s3Uri, s3UriOut, jobName, dataAccessRoleArn).toString() - Assertions.assertTrue(!jobId.isEmpty()) - println("Test 2 passed") - } - - @Test - @Order(3) - fun listTextTranslationJobsTest() = - runBlocking { - getTranslationJobs() - println("Test 3 passed") - } - - @Test - @Order(4) - fun describeTextTranslationJobTest() = - runBlocking { - describeTranslationJob(jobId) - println("Test 4 passed") - } - - private suspend fun getSecretValues(): String { - val secretName = "test/translate" - val valueRequest = - GetSecretValueRequest { - secretId = secretName - } - SecretsManagerClient { - region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() - }.use { secretClient -> - val valueResponse = secretClient.getSecretValue(valueRequest) - return valueResponse.secretString.toString() - } - } - - @Nested - @DisplayName("A class used to get test values from test/translate (an AWS Secrets Manager secret)") - internal class SecretValues { - val s3Uri: String? = null - val s3UriOut: String? = null - val jobName: String? = null - val dataAccessRoleArn: String? 
= null - } -} diff --git a/kotlin/services/xray/src/test/kotlin/XrayKotlinTest.kt b/kotlin/services/xray/src/test/kotlin/XrayKotlinTest.kt index a30b04a5990..e7ee6f05770 100644 --- a/kotlin/services/xray/src/test/kotlin/XrayKotlinTest.kt +++ b/kotlin/services/xray/src/test/kotlin/XrayKotlinTest.kt @@ -1,7 +1,6 @@ // Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. // SPDX-License-Identifier: Apache-2.0 -import aws.sdk.kotlin.runtime.auth.credentials.EnvironmentCredentialsProvider import aws.sdk.kotlin.services.secretsmanager.SecretsManagerClient import aws.sdk.kotlin.services.secretsmanager.model.GetSecretValueRequest import com.google.gson.Gson @@ -20,11 +19,14 @@ import org.junit.jupiter.api.Order import org.junit.jupiter.api.Test import org.junit.jupiter.api.TestInstance import org.junit.jupiter.api.TestMethodOrder +import org.slf4j.Logger +import org.slf4j.LoggerFactory import java.util.Random @TestInstance(TestInstance.Lifecycle.PER_CLASS) @TestMethodOrder(OrderAnnotation::class) class XrayKotlinTest { + private val logger: Logger = LoggerFactory.getLogger(XrayKotlinTest::class.java) private var groupName = "" private var newGroupName = "" private var ruleName = "" @@ -42,17 +44,6 @@ class XrayKotlinTest { groupName = values.groupName.toString() newGroupName = values.newGroupName.toString() + randomNum ruleName = values.ruleName.toString() + randomNum - - // Uncomment this code block if you prefer using a config.properties file to retrieve AWS values required for these tests. - - /* - val input: InputStream = this.javaClass.getClassLoader().getResourceAsStream("config.properties") - val prop = Properties() - prop.load(input) - groupName = prop.getProperty("groupName") - newGroupName = prop.getProperty("newGroupName") - ruleName = prop.getProperty("ruleName") - */ } @Test @@ -60,7 +51,7 @@ class XrayKotlinTest { fun createGroup() = runBlocking { createNewGroup(newGroupName) - println("Test 1 passed") + logger.info("Test 1 passed") } @Test @@ -68,7 +59,7 @@ class XrayKotlinTest { fun createSamplingRule() = runBlocking { createRule(ruleName) - println("Test 2 passed") + logger.info("Test 2 passed") } @Test @@ -76,7 +67,7 @@ class XrayKotlinTest { fun getGroups() = runBlocking { getAllGroups() - println("Test 3 passed") + logger.info("Test 3 passed") } @Test @@ -84,7 +75,7 @@ class XrayKotlinTest { fun getSamplingRules() = runBlocking { getRules() - println("Test 4 passed") + logger.info("Test 4 passed") } @Test @@ -92,7 +83,7 @@ class XrayKotlinTest { fun deleteSamplingRule() = runBlocking { deleteRule(ruleName) - println("Test 5 passed") + logger.info("Test 5 passed") } @Test @@ -100,7 +91,7 @@ class XrayKotlinTest { fun deleteGroup() = runBlocking { deleteSpecificGroup(newGroupName) - println("Test 6 passed") + logger.info("Test 6 passed") } private suspend fun getSecretValues(): String { @@ -111,7 +102,6 @@ class XrayKotlinTest { } SecretsManagerClient { region = "us-east-1" - credentialsProvider = EnvironmentCredentialsProvider() }.use { secretClient -> val valueResponse = secretClient.getSecretValue(valueRequest) return valueResponse.secretString.toString() diff --git a/kotlin/services/xray/src/test/resources/logback.xml b/kotlin/services/xray/src/test/resources/logback.xml new file mode 100644 index 00000000000..ba2fdf4e176 --- /dev/null +++ b/kotlin/services/xray/src/test/resources/logback.xml @@ -0,0 +1,11 @@ + + + + %d{yyyy-MM-dd HH:mm:ss} [%thread] %-5level %logger{36} - %msg%n + + + + + + + \ No newline at end of file diff --git 
a/kotlin/usecases/topics_and_queues/src/test/kotlin/AWSSNSTest.kt b/kotlin/usecases/topics_and_queues/src/test/kotlin/AWSSNSTest.kt index 26d91d5f37b..a5ac01c3b75 100644 --- a/kotlin/usecases/topics_and_queues/src/test/kotlin/AWSSNSTest.kt +++ b/kotlin/usecases/topics_and_queues/src/test/kotlin/AWSSNSTest.kt @@ -141,7 +141,7 @@ class AWSSNSTest { @Test @Order(2) fun testWorkflowNonFIFO() = runBlocking { - val accountId = "814548047983" + val accountId = "" val topicName: String val topicArnVal: String? val sqsQueueName: String diff --git a/python/example_code/bedrock-agent-runtime/README.md b/python/example_code/bedrock-agent-runtime/README.md index b6bc29f2684..f0657454c6b 100644 --- a/python/example_code/bedrock-agent-runtime/README.md +++ b/python/example_code/bedrock-agent-runtime/README.md @@ -34,11 +34,19 @@ python -m pip install -r requirements.txt +### Basics + +Code examples that show you how to perform the essential operations within a service. + +- [Learn the basics](flows/flow-conversation.py) + + ### Single actions Code excerpts that show you how to call individual service functions. - [InvokeAgent](bedrock_agent_runtime_wrapper.py#L33) +- [InvokeFlow](bedrock_agent_runtime_wrapper.py#L71) @@ -53,6 +61,24 @@ Code excerpts that show you how to call individual service functions. +#### Learn the basics + +This example shows you how to use InvokeFlow to converse with an Amazon Bedrock flow that includes an agent node. + + + + + +Start the example by running the following at a command prompt: + +``` +python flows/flow-conversation.py +``` + + + + + ### Tests @@ -80,4 +106,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/bedrock-agent-runtime/bedrock_agent_runtime_wrapper.py b/python/example_code/bedrock-agent-runtime/bedrock_agent_runtime_wrapper.py index ab15b6a0de6..8651ca90cd5 100644 --- a/python/example_code/bedrock-agent-runtime/bedrock_agent_runtime_wrapper.py +++ b/python/example_code/bedrock-agent-runtime/bedrock_agent_runtime_wrapper.py @@ -68,4 +68,58 @@ def invoke_agent(self, agent_id, agent_alias_id, session_id, prompt): # snippet-end:[python.example_code.bedrock-agent-runtime.InvokeAgent] -# snippet-end:[python.example_code.bedrock-agent-runtime.BedrockAgentsRuntimeWrapper.class] + # snippet-start:[python.example_code.bedrock-agent-runtime.InvokeFlow] + def invoke_flow(self, flow_id, flow_alias_id, input_data, execution_id): + """ + Invoke an Amazon Bedrock flow and handle the response stream. + + Args: + param flow_id: The ID of the flow to invoke. + param flow_alias_id: The alias ID of the flow. + param input_data: Input data for the flow. + param execution_id: Execution ID for continuing a flow. Use the value None on first run. + + Return: Response from the flow. + """ + try: + + request_params = None + + if execution_id is None: + # Don't pass execution ID for first run. 
+ request_params = { + "flowIdentifier": flow_id, + "flowAliasIdentifier": flow_alias_id, + "inputs": input_data, + "enableTrace": True + } + else: + request_params = { + "flowIdentifier": flow_id, + "flowAliasIdentifier": flow_alias_id, + "executionId": execution_id, + "inputs": input_data, + "enableTrace": True + } + + response = self.agents_runtime_client.invoke_flow(**request_params) + + if "executionId" not in request_params: + execution_id = response['executionId'] + + result = "" + + # Get the streaming response + for event in response['responseStream']: + result = result + str(event) + '\n' + print(result) + + except ClientError as e: + logger.error("Couldn't invoke flow. Reason: %s", e) + raise + + return result + + # snippet-end:[python.example_code.bedrock-agent-runtime.InvokeFlow] + +# snippet-end:[python.example_code.bedrock-agent-runtime.BedrockAgentsRuntimeWrapper.class] \ No newline at end of file diff --git a/python/example_code/bedrock-agent-runtime/flows/flow-conversation.py b/python/example_code/bedrock-agent-runtime/flows/flow-conversation.py new file mode 100644 index 00000000000..b5fccd1c2e5 --- /dev/null +++ b/python/example_code/bedrock-agent-runtime/flows/flow-conversation.py @@ -0,0 +1,182 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +# snippet-start:[python.example_code.bedrock-agent-runtime.flow_conversation.complete] + + +""" +Shows how to run an Amazon Bedrock flow with InvokeFlow and handle multi-turn interaction +for a single conversation. +For more information, see https://docs.aws.amazon.com/bedrock/latest/userguide/flows-multi-turn-invocation.html. + +""" +import logging +import boto3 +import botocore + +import botocore.exceptions + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def invoke_flow(client, flow_id, flow_alias_id, input_data, execution_id): + """ + Invoke an Amazon Bedrock flow and handle the response stream. + + Args: + client: Boto3 client for Amazon Bedrock agent runtime. + flow_id: The ID of the flow to invoke. + flow_alias_id: The alias ID of the flow. + input_data: Input data for the flow. + execution_id: Execution ID for continuing a flow. Use the value None on first run. + + Returns: + Dict containing flow_status, input_required info, and execution_id. + """ + + response = None + request_params = None + + if execution_id is None: + # Don't pass execution ID for first run. + request_params = { + "flowIdentifier": flow_id, + "flowAliasIdentifier": flow_alias_id, + "inputs": [input_data], + "enableTrace": True + } + else: + request_params = { + "flowIdentifier": flow_id, + "flowAliasIdentifier": flow_alias_id, + "executionId": execution_id, + "inputs": [input_data], + "enableTrace": True + } + + response = client.invoke_flow(**request_params) + + if "executionId" not in request_params: + execution_id = response['executionId'] + + input_required = None + flow_status = "" + + # Process the streaming response + for event in response['responseStream']: + + # Check if flow is complete. + if 'flowCompletionEvent' in event: + flow_status = event['flowCompletionEvent']['completionReason'] + + # Check if more input is needed from the user. + elif 'flowMultiTurnInputRequestEvent' in event: + input_required = event + + # Print the model output. + elif 'flowOutputEvent' in event: + print(event['flowOutputEvent']['content']['document']) + + # Log trace events.
+ elif 'flowTraceEvent' in event: + logger.info("Flow trace: %s", event['flowTraceEvent']) + + return { + "flow_status": flow_status, + "input_required": input_required, + "execution_id": execution_id + } + + +def converse_with_flow(bedrock_agent_client, flow_id, flow_alias_id): + """ + Run a conversation with the supplied flow. + + Args: + bedrock_agent_client: Boto3 client for Amazon Bedrock agent runtime. + flow_id: The ID of the flow to run. + flow_alias_id: The alias ID of the flow. + + """ + + flow_execution_id = None + finished = False + + # Get the initial prompt from the user. + user_input = input("Enter input: ") + + # Use prompt to create input data. + flow_input_data = { + "content": { + "document": user_input + }, + "nodeName": "FlowInputNode", + "nodeOutputName": "document" + } + + try: + while not finished: + # Invoke the flow until successfully finished. + + result = invoke_flow( + bedrock_agent_client, flow_id, flow_alias_id, flow_input_data, flow_execution_id) + + status = result['flow_status'] + flow_execution_id = result['execution_id'] + more_input = result['input_required'] + if status == "INPUT_REQUIRED": + # The flow needs more information from the user. + logger.info("The flow %s requires more input", flow_id) + user_input = input( + more_input['flowMultiTurnInputRequestEvent']['content']['document'] + ": ") + flow_input_data = { + "content": { + "document": user_input + }, + "nodeName": more_input['flowMultiTurnInputRequestEvent']['nodeName'], + "nodeInputName": "agentInputText" + + } + elif status == "SUCCESS": + # The flow completed successfully. + finished = True + logger.info("The flow %s successfully completed.", flow_id) + + except botocore.exceptions.ClientError as e: + print(f"Client error: {str(e)}") + logger.error("Client error: %s", str(e)) + + except Exception as e: + print(f"An error occurred: {str(e)}") + logger.error("An error occurred: %s", str(e)) + logger.error("Error type: %s", type(e)) + + +def main(): + """ + Main entry point for the script. + """ + + # Replace these with your actual flow ID and flow alias ID. + FLOW_ID = 'YOUR_FLOW_ID' + FLOW_ALIAS_ID = 'YOUR_FLOW_ALIAS_ID' + + logger.info("Starting conversation with FLOW: %s ID: %s", + FLOW_ID, FLOW_ALIAS_ID) + + # Get the Bedrock agent runtime client. + session = boto3.Session(profile_name='default') + bedrock_agent_client = session.client('bedrock-agent-runtime') + + # Start the conversation.
+ converse_with_flow(bedrock_agent_client, FLOW_ID, FLOW_ALIAS_ID) + + logger.info("Conversation with FLOW: %s ID: %s finished", + FLOW_ID, FLOW_ALIAS_ID) + + +if __name__ == "__main__": + main() + + # snippet-end:[python.example_code.bedrock-agent-runtime.flow_conversation.complete] diff --git a/python/example_code/bedrock-agent-runtime/requirements.txt b/python/example_code/bedrock-agent-runtime/requirements.txt index db118baeb60..bfec492f1b4 100644 --- a/python/example_code/bedrock-agent-runtime/requirements.txt +++ b/python/example_code/bedrock-agent-runtime/requirements.txt @@ -1,5 +1,5 @@ -boto3==1.33.8 -botocore==1.33.8 +boto3==1.36.13 +botocore==1.36.13 colorama==0.4.6 iniconfig==2.0.0 jmespath==1.0.1 diff --git a/python/example_code/bedrock-agent-runtime/test/test_bedrock_agent_runtime_wrapper.py b/python/example_code/bedrock-agent-runtime/test/test_bedrock_agent_runtime_wrapper.py index 36382439560..ee3181c3ec1 100644 --- a/python/example_code/bedrock-agent-runtime/test/test_bedrock_agent_runtime_wrapper.py +++ b/python/example_code/bedrock-agent-runtime/test/test_bedrock_agent_runtime_wrapper.py @@ -40,8 +40,52 @@ async def test_invoke_agent(make_stubber, error_code): if error_code is None: wrapper.invoke_agent(agent_id, agent_alias_id, session_id, prompt) else: - with pytest.raises(ClientError): + with pytest.raises(ClientError) as exc_info: async for _ in wrapper.invoke_agent( agent_id, agent_alias_id, session_id, prompt ): assert exc_info.value.response["Error"]["Code"] == error_code + +@pytest.mark.asyncio +@pytest.mark.parametrize("error_code", [None, "ClientError"]) +async def test_invoke_flow(make_stubber, error_code): + runtime_client = boto3.client( + service_name="bedrock-agent-runtime", region_name="us-east-1" + ) + stubber = make_stubber(runtime_client) + wrapper = BedrockAgentRuntimeWrapper(runtime_client) + + flow_id = "FAKE_AGENT_ID" + flow_alias_id = "FAKE_AGENT_ALIAS_ID" + execution_id = "FAKE_SESSION_ID" + + + inputs = [ + { + "content": { + "document": "hello!" + }, + "nodeName": "FlowInputNode", + "nodeOutputName": "document" + } + ] + + expected_params = { + "enableTrace" : True, + "flowIdentifier": flow_id, + "flowAliasIdentifier": flow_alias_id, + "executionId": execution_id, + "inputs": inputs + } + response = {"responseStream": {}, "executionId": execution_id} + + stubber.stub_invoke_flow(expected_params, response, error_code=error_code) + + if error_code is None: + result = wrapper.invoke_flow(flow_id, flow_alias_id, inputs, execution_id) + assert result is not None + else: + with pytest.raises(ClientError) as exc_info: + wrapper.invoke_flow(flow_id, flow_alias_id, inputs, execution_id) + + assert exc_info.value.response["Error"]["Code"] == error_code diff --git a/python/example_code/bedrock-agent-runtime/test/test_flow_conversation.py b/python/example_code/bedrock-agent-runtime/test/test_flow_conversation.py new file mode 100644 index 00000000000..fd619675009 --- /dev/null +++ b/python/example_code/bedrock-agent-runtime/test/test_flow_conversation.py @@ -0,0 +1,31 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +import pytest +import subprocess +import sys + +files_under_test = [ + "flows/flow-conversation.py" +] + +@pytest.mark.integ +@pytest.mark.parametrize("file", files_under_test) +def test_flow_conversation(file): + # Simulate user input - each string represents one input() call + # If you're using the docs at https://docs.aws.amazon.com/bedrock/latest/userguide/flows-multi-turn-invocation.html, + # "Create a playlist\n 3\n pop, castles\n" should work with Antropic Haiku. + test_input = "Hello\n" + + result = subprocess.run( + [sys.executable, file], + input=test_input, + capture_output=True, + text=True, + ) + + print(f"STDOUT: {result.stdout}") # For debugging + print(f"STDERR: {result.stderr}") # For debugging + + assert result.stdout != "" + assert result.returncode == 0 diff --git a/python/example_code/bedrock-runtime/README.md b/python/example_code/bedrock-runtime/README.md index 03eb70ca24b..68a71a78623 100644 --- a/python/example_code/bedrock-runtime/README.md +++ b/python/example_code/bedrock-runtime/README.md @@ -38,11 +38,32 @@ python -m pip install -r requirements.txt > see [Model access](https://docs.aws.amazon.com/bedrock/latest/userguide/model-access.html). > + +### Get started + +- [Hello Amazon Bedrock Runtime](hello/hello_bedrock_runtime_invoke.py#L5) (`InvokeModel`) + +### Scenarios + +Code examples that show you how to accomplish a specific task by calling multiple +functions within the same service. + +- [Tool use with the Converse API](cross-model-scenarios/tool_use_demo/tool_use_demo.py) + ### AI21 Labs Jurassic-2 - [Converse](models/ai21_labs_jurassic2/converse.py#L4) - [InvokeModel](models/ai21_labs_jurassic2/invoke_model.py#L4) +### Amazon Nova + +- [Converse](models/amazon_nova/amazon_nova_text/converse.py#L4) +- [ConverseStream](models/amazon_nova/amazon_nova_text/converse_stream.py#L4) + +### Amazon Nova Canvas + +- [InvokeModel](models/amazon_nova/amazon_nova_canvas/invoke_model.py#L4) + ### Amazon Titan Image Generator - [InvokeModel](models/amazon_titan_image_generator/invoke_model.py#L4) @@ -120,7 +141,32 @@ Mistral AI. +#### Hello Amazon Bedrock Runtime + +This example shows you how to get started using Amazon Bedrock Runtime. + +``` +python hello/hello_bedrock_runtime_invoke.py +``` + + +#### Tool use with the Converse API + +This example shows you how to build a typical interaction between an application, a generative AI model, and connected tools or APIs to mediate interactions between the AI and the outside world. It uses the example of connecting an external weather API to the AI model so it can provide real-time weather information based on user input. + + + + + +Start the example by running the following at a command prompt: + +``` +python cross-model-scenarios/tool_use_demo/tool_use_demo.py +``` + + + ### Tests @@ -148,4 +194,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/requirements.txt b/python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/requirements.txt index e9bf5c64b29..6d21f66572b 100644 --- a/python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/requirements.txt +++ b/python/example_code/bedrock-runtime/cross-model-scenarios/tool_use_demo/requirements.txt @@ -1,6 +1,6 @@ boto3==1.34.124 botocore==1.34.124 -certifi==2024.6.2 +certifi==2024.7.4 charset-normalizer==3.3.2 idna==3.7 jmespath==1.0.1 diff --git a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py new file mode 100644 index 00000000000..e6139428532 --- /dev/null +++ b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_converse.py @@ -0,0 +1,83 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + + +# snippet-start:[bedrock-runtime.example_code.hello_bedrock_converse.complete] + +""" +Uses the Amazon Bedrock runtime client Converse operation to send a user message to a model. +""" +import logging +import boto3 + +from botocore.exceptions import ClientError + + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def converse(brt, model_id, user_message): + """ + Uses the Converse operation to send a user message to the supplied model. + param brt: A Bedrock Runtime boto3 client. + param model_id: The model ID for the model that you want to use. + param user_message: The user message that you want to send to the model. + + :return: The text response from the model. + """ + + # Format the user message for the Converse operation. + conversation = [ + { + "role": "user", + "content": [{"text": user_message}], + } +] + + try: + # Send the message to the model, using a basic inference configuration. + response = brt.converse( + modelId=model_id, + messages=conversation, + inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9}, + ) + + # Extract and return the response text. + response_text = response["output"]["message"]["content"][0]["text"] + return response_text + + except (ClientError, Exception) as e: + print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") + raise + + +def main(): + """Entry point for the example. Uses the AWS SDK for Python (Boto3) + to create an Amazon Bedrock runtime client. Then sends a user message to a model + in the Region set in the caller's profile and credentials. + """ + + # Create an Amazon Bedrock Runtime client. + brt = boto3.client("bedrock-runtime") + + # Set the model ID, e.g., Amazon Titan Text G1 - Express. + model_id = "amazon.titan-text-express-v1" + + # Define the message for the model. + message = "Describe the purpose of a 'hello world' program in one line." + + # Send the message to the model.
+ response = converse(brt, model_id, message) + + print(f"Response: {response}") + + logger.info("Done.") + + +if __name__ == "__main__": + main() + + # snippet-end:[bedrock-runtime.example_code.hello_bedrock_converse.complete] + + diff --git a/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py new file mode 100644 index 00000000000..d86268579dc --- /dev/null +++ b/python/example_code/bedrock-runtime/hello/hello_bedrock_runtime_invoke.py @@ -0,0 +1,87 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + + +# snippet-start:[bedrock-runtime.example_code.hello_bedrock_invoke.complete] + +""" +Uses the Amazon Bedrock runtime client InvokeModel operation to send a prompt to a model. +""" +import logging +import json +import boto3 + + +from botocore.exceptions import ClientError + + +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + + +def invoke_model(brt, model_id, prompt): + """ + Invokes the specified model with the supplied prompt. + param brt: A bedrock runtime boto3 client + param model_id: The model ID for the model that you want to use. + param prompt: The prompt that you want to send to the model. + + :return: The text response from the model. + """ + + # Format the request payload using the model's native structure. + native_request = { + "inputText": prompt, + "textGenerationConfig": { + "maxTokenCount": 512, + "temperature": 0.5, + "topP": 0.9 + } + } + + # Convert the native request to JSON. + request = json.dumps(native_request) + + try: + # Invoke the model with the request. + response = brt.invoke_model(modelId=model_id, body=request) + + # Decode the response body. + model_response = json.loads(response["body"].read()) + + # Extract and print the response text. + response_text = model_response["results"][0]["outputText"] + return response_text + + except (ClientError, Exception) as e: + print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") + raise + + +def main(): + """Entry point for the example. Uses the AWS SDK for Python (Boto3) + to create an Amazon Bedrock runtime client. Then sends a prompt to a model + in the region set in the callers profile and credentials. + """ + + # Create an Amazon Bedrock Runtime client. + brt = boto3.client("bedrock-runtime") + + # Set the model ID, e.g., Amazon Titan Text G1 - Express. + model_id = "amazon.titan-text-express-v1" + + # Define the prompt for the model. + prompt = "Describe the purpose of a 'hello world' program in one line." + + # Send the prompt to the model. + response = invoke_model(brt, model_id, prompt) + + print(f"Response: {response}") + + logger.info("Done.") + + +if __name__ == "__main__": + main() + + # snippet-end:[bedrock-runtime.example_code.hello_bedrock_invoke.complete] diff --git a/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_canvas/invoke_model.py b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_canvas/invoke_model.py new file mode 100644 index 00000000000..0e479fbf832 --- /dev/null +++ b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_canvas/invoke_model.py @@ -0,0 +1,66 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+# SPDX-License-Identifier: Apache-2.0 + +# snippet-start:[python.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration] +# Use the native inference API to create an image with Amazon Nova Canvas + +import base64 +import json +import os +import random + +import boto3 + +# Create a Bedrock Runtime client in the AWS Region of your choice. +client = boto3.client("bedrock-runtime", region_name="us-east-1") + +# Set the model ID. +model_id = "amazon.nova-canvas-v1:0" + +# Define the image generation prompt for the model. +prompt = "A stylized picture of a cute old steampunk robot." + +# Generate a random seed between 0 and 858,993,459 +seed = random.randint(0, 858993460) + +# Format the request payload using the model's native structure. +native_request = { + "taskType": "TEXT_IMAGE", + "textToImageParams": {"text": prompt}, + "imageGenerationConfig": { + "seed": seed, + "quality": "standard", + "height": 512, + "width": 512, + "numberOfImages": 1, + }, +} + +# Convert the native request to JSON. +request = json.dumps(native_request) + +# Invoke the model with the request. +response = client.invoke_model(modelId=model_id, body=request) + +# Decode the response body. +model_response = json.loads(response["body"].read()) + +# Extract the image data. +base64_image_data = model_response["images"][0] + +# Save the generated image to a local folder. +i, output_dir = 1, "output" +if not os.path.exists(output_dir): + os.makedirs(output_dir) +while os.path.exists(os.path.join(output_dir, f"nova_canvas_{i}.png")): + i += 1 + +image_data = base64.b64decode(base64_image_data) + +image_path = os.path.join(output_dir, f"nova_canvas_{i}.png") +with open(image_path, "wb") as file: + file.write(image_data) + +print(f"The generated image has been saved to {image_path}") + +# snippet-end:[python.example_code.bedrock-runtime.InvokeModel_AmazonNovaImageGeneration] diff --git a/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse.py b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse.py new file mode 100644 index 00000000000..3000a6a62f2 --- /dev/null +++ b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse.py @@ -0,0 +1,41 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +# snippet-start:[python.example_code.bedrock-runtime.Converse_AmazonNovaText] +# Use the Conversation API to send a text message to Amazon Nova. + +import boto3 +from botocore.exceptions import ClientError + +# Create a Bedrock Runtime client in the AWS Region you want to use. +client = boto3.client("bedrock-runtime", region_name="us-east-1") + +# Set the model ID, e.g., Amazon Nova Lite. +model_id = "amazon.nova-lite-v1:0" + +# Start a conversation with the user message. +user_message = "Describe the purpose of a 'hello world' program in one line." +conversation = [ + { + "role": "user", + "content": [{"text": user_message}], + } +] + +try: + # Send the message to the model, using a basic inference configuration. + response = client.converse( + modelId=model_id, + messages=conversation, + inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9}, + ) + + # Extract and print the response text. + response_text = response["output"]["message"]["content"][0]["text"] + print(response_text) + +except (ClientError, Exception) as e: + print(f"ERROR: Can't invoke '{model_id}'. 
Reason: {e}") + exit(1) + +# snippet-end:[python.example_code.bedrock-runtime.Converse_AmazonNovaText] diff --git a/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse_stream.py b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse_stream.py new file mode 100644 index 00000000000..a71bacf375f --- /dev/null +++ b/python/example_code/bedrock-runtime/models/amazon_nova/amazon_nova_text/converse_stream.py @@ -0,0 +1,44 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +# snippet-start:[python.example_code.bedrock-runtime.ConverseStream_AmazonNovaText] +# Use the Conversation API to send a text message to Amazon Nova Text +# and print the response stream. + +import boto3 +from botocore.exceptions import ClientError + +# Create a Bedrock Runtime client in the AWS Region you want to use. +client = boto3.client("bedrock-runtime", region_name="us-east-1") + +# Set the model ID, e.g., Amazon Nova Lite. +model_id = "amazon.nova-lite-v1:0" + +# Start a conversation with the user message. +user_message = "Describe the purpose of a 'hello world' program in one line." +conversation = [ + { + "role": "user", + "content": [{"text": user_message}], + } +] + +try: + # Send the message to the model, using a basic inference configuration. + streaming_response = client.converse_stream( + modelId=model_id, + messages=conversation, + inferenceConfig={"maxTokens": 512, "temperature": 0.5, "topP": 0.9}, + ) + + # Extract and print the streamed response text in real-time. + for chunk in streaming_response["stream"]: + if "contentBlockDelta" in chunk: + text = chunk["contentBlockDelta"]["delta"]["text"] + print(text, end="") + +except (ClientError, Exception) as e: + print(f"ERROR: Can't invoke '{model_id}'. Reason: {e}") + exit(1) + +# snippet-end:[python.example_code.bedrock-runtime.ConverseStream_AmazonNovaText] diff --git a/python/example_code/bedrock-runtime/requirements.txt b/python/example_code/bedrock-runtime/requirements.txt index e084f5d893a..2e935f7aa85 100644 --- a/python/example_code/bedrock-runtime/requirements.txt +++ b/python/example_code/bedrock-runtime/requirements.txt @@ -1,35 +1,12 @@ -beautifulsoup4==4.12.3 -boto3==1.35.28 -botocore==1.35.28 -certifi==2024.8.30 -charset-normalizer==3.3.2 +boto3==1.36.22 +botocore==1.36.22 colorama==0.4.6 -contourpy==1.3.0 -cycler==0.12.1 -fonttools==4.54.1 -geojson==3.1.0 -idna==3.10 iniconfig==2.0.0 jmespath==1.0.1 -kiwisolver==1.4.7 -lxml==5.3.0 -matplotlib==3.9.2 -numpy==2.1.1 -packaging==24.1 -pandas==2.2.3 -pillow==10.4.0 -pip-review==1.3.0 +packaging==24.2 pluggy==1.5.0 -pyparsing==3.1.4 -pytest==8.3.3 -pytest-asyncio==0.24.0 +pytest==8.3.4 python-dateutil==2.9.0.post0 -pytz==2024.2 -requests==2.32.3 -s3transfer==0.10.2 -six==1.16.0 -soupsieve==2.6 -tzdata==2024.2 -ujson==5.10.0 -urllib3==2.2.3 -xarray==2024.9.0 +s3transfer==0.11.2 +six==1.17.0 +urllib3==2.3.0 diff --git a/python/example_code/bedrock-runtime/test/test_converse.py b/python/example_code/bedrock-runtime/test/test_converse.py index a24b9c2fd42..4bcefb110ae 100644 --- a/python/example_code/bedrock-runtime/test/test_converse.py +++ b/python/example_code/bedrock-runtime/test/test_converse.py @@ -1,12 +1,14 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
# SPDX-License-Identifier: Apache-2.0 -import pytest import subprocess import sys +import pytest + files_under_test = [ "models/ai21_labs_jurassic2/converse.py", + "models/amazon_nova/amazon_nova_text/converse.py", "models/amazon_titan_text/converse.py", "models/anthropic_claude/converse.py", "models/cohere_command/converse.py", diff --git a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py new file mode 100644 index 00000000000..f69602ba462 --- /dev/null +++ b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime.py @@ -0,0 +1,23 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import pytest +import subprocess +import sys + +files_under_test = [ + # Text models + "hello/hello_bedrock_runtime_invoke.py" +] + + +@pytest.mark.integ +@pytest.mark.parametrize("file", files_under_test) +def test_hello_bedrock(file): + result = subprocess.run( + [sys.executable, file], + capture_output=True, + text=True, + ) + assert result.stdout != "" + assert result.returncode == 0 diff --git a/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py new file mode 100644 index 00000000000..8de7d56b722 --- /dev/null +++ b/python/example_code/bedrock-runtime/test/test_hello_bedrock_runtime_invoke.py @@ -0,0 +1,24 @@ +# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +# SPDX-License-Identifier: Apache-2.0 + +import pytest +import subprocess +import sys + +files_under_test = [ + # Text models + "hello/hello_bedrock_runtime_invoke.py", + "hello/hello_bedrock_runtime_converse.py" +] + + +@pytest.mark.integ +@pytest.mark.parametrize("file", files_under_test) +def test_hello_bedrock(file): + result = subprocess.run( + [sys.executable, file], + capture_output=True, + text=True, + ) + assert result.stdout != "" + assert result.returncode == 0 diff --git a/python/example_code/bedrock-runtime/test/test_invoke_model.py b/python/example_code/bedrock-runtime/test/test_invoke_model.py index 1193de8d162..562ad785d8e 100644 --- a/python/example_code/bedrock-runtime/test/test_invoke_model.py +++ b/python/example_code/bedrock-runtime/test/test_invoke_model.py @@ -1,13 +1,15 @@ # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. # SPDX-License-Identifier: Apache-2.0 -import pytest import subprocess import sys +import pytest + files_under_test = [ # Text models "models/ai21_labs_jurassic2/invoke_model.py", + "models/amazon_nova/amazon_nova_canvas/invoke_model.py", "models/amazon_titan_text/invoke_model.py", "models/anthropic_claude/invoke_model.py", "models/cohere_command/command_invoke_model.py", diff --git a/python/example_code/cloudwatch-logs/README.md b/python/example_code/cloudwatch-logs/README.md index a7da60c829c..63f8d0735c4 100644 --- a/python/example_code/cloudwatch-logs/README.md +++ b/python/example_code/cloudwatch-logs/README.md @@ -47,6 +47,7 @@ Code examples that show you how to accomplish a specific task by calling multipl functions within the same service. - [Run a large query](scenarios/large-query/exec.py) +- [Use scheduled events to invoke a Lambda function](../../example_code/lambda) @@ -80,6 +81,18 @@ python scenarios/large-query/exec.py +#### Use scheduled events to invoke a Lambda function + +This example shows you how to create an AWS Lambda function invoked by an Amazon EventBridge scheduled event. 
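
As a rough illustration of what that scenario wires together, the sketch below uses Boto3 to create an EventBridge rule on a schedule, grant it permission to invoke a Lambda function, and attach the function as the rule's target. The rule name, schedule expression, and function ARN are placeholder values, and the function is assumed to exist already.

```
import boto3

events = boto3.client("events")
lambda_client = boto3.client("lambda")

# Placeholder values. The Lambda function is assumed to exist already.
function_arn = "arn:aws:lambda:us-east-1:111122223333:function:my-scheduled-function"
rule_name = "my-five-minute-rule"

# Create a rule that fires every five minutes.
rule_arn = events.put_rule(
    Name=rule_name,
    ScheduleExpression="rate(5 minutes)",
    State="ENABLED",
)["RuleArn"]

# Allow EventBridge to invoke the function.
lambda_client.add_permission(
    FunctionName=function_arn,
    StatementId="EventBridgeScheduledInvoke",
    Action="lambda:InvokeFunction",
    Principal="events.amazonaws.com",
    SourceArn=rule_arn,
)

# Point the rule at the function.
events.put_targets(Rule=rule_name, Targets=[{"Id": "1", "Arn": function_arn}])
```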
+ + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. diff --git a/python/example_code/dynamodb/README.md b/python/example_code/dynamodb/README.md index 0853a0648a6..b3d81a5cda4 100644 --- a/python/example_code/dynamodb/README.md +++ b/python/example_code/dynamodb/README.md @@ -77,6 +77,8 @@ functions within the same service. - [Create a websocket chat application](../../cross_service/apigateway_websocket_chat) - [Query a table by using batches of PartiQL statements](partiql/scenario_partiql_batch.py) - [Query a table using PartiQL](partiql/scenario_partiql_single.py) +- [Use API Gateway to invoke a Lambda function](../../example_code/lambda) +- [Use scheduled events to invoke a Lambda function](../../example_code/lambda) @@ -259,6 +261,30 @@ python partiql/scenario_partiql_single.py +#### Use API Gateway to invoke a Lambda function + +This example shows you how to create an AWS Lambda function invoked by Amazon API Gateway. + + + + + + + + + +#### Use scheduled events to invoke a Lambda function + +This example shows you how to create an AWS Lambda function invoked by an Amazon EventBridge scheduled event. + + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. @@ -285,4 +311,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/iotsitewise/README.md b/python/example_code/iotsitewise/README.md index 7a44db848c8..9c0d4a4adda 100644 --- a/python/example_code/iotsitewise/README.md +++ b/python/example_code/iotsitewise/README.md @@ -86,8 +86,17 @@ python hello/hello_iot_sitewise.py #### Learn the basics -This example shows you how to learn core operations for AWS IoT SiteWise using an AWS SDK. - +This example shows you how to do the following: + +- Create an AWS IoT SiteWise Asset Model. +- Create an AWS IoT SiteWise Asset. +- Retrieve the property ID values. +- Send data to an AWS IoT SiteWise Asset. +- Retrieve the value of the AWS IoT SiteWise Asset property. +- Create an AWS IoT SiteWise Portal. +- Create an AWS IoT SiteWise Gateway. +- Describe the AWS IoT SiteWise Gateway. +- Delete the AWS IoT SiteWise Assets. diff --git a/python/example_code/redshift/README.md b/python/example_code/redshift/README.md index 865bbedb419..dbea462021c 100644 --- a/python/example_code/redshift/README.md +++ b/python/example_code/redshift/README.md @@ -79,8 +79,15 @@ python hello.py #### Learn the basics -This example shows you how to learn core operations for Amazon Redshift using an AWS SDK. +This example shows you how to do the following: +- Create a Redshift cluster. +- List databases in the cluster. +- Create a table named Movies. +- Populate the Movies table. +- Query the Movies table by year. +- Modify the Redshift cluster. +- Delete the Amazon Redshift cluster. @@ -122,4 +129,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/s3/README.md b/python/example_code/s3/README.md index b5568cded93..1a80d184754 100644 --- a/python/example_code/s3/README.md +++ b/python/example_code/s3/README.md @@ -88,6 +88,7 @@ functions within the same service. 
- [Create an Amazon Textract explorer application](../../cross_service/textract_explorer) - [Detect entities in text extracted from an image](../../cross_service/textract_comprehend_notebook) - [Detect objects in images](../../cross_service/photo_analyzer) +- [Detect people and objects in a video](../../example_code/rekognition) - [Make conditional requests](scenarios/conditional_requests/scenario.py) - [Manage versioned objects in batches with a Lambda function](../../example_code/s3/s3_versioning) - [Upload or download large files](file_transfer/file_transfer.py) @@ -191,6 +192,18 @@ This example shows you how to build an app that uses Amazon Rekognition to detec +#### Detect people and objects in a video + +This example shows you how to detect people and objects in a video with Amazon Rekognition. + + + + + + + + + #### Make conditional requests This example shows you how to add preconditions to Amazon S3 requests. @@ -290,4 +303,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/ses/README.md b/python/example_code/ses/README.md index 8424785731b..557a434322b 100644 --- a/python/example_code/ses/README.md +++ b/python/example_code/ses/README.md @@ -68,6 +68,7 @@ functions within the same service. - [Create a web application to track DynamoDB data](../../cross_service/dynamodb_item_tracker) - [Create an Aurora Serverless work item tracker](../../cross_service/aurora_item_tracker) - [Detect objects in images](../../cross_service/photo_analyzer) +- [Detect people and objects in a video](../../example_code/rekognition) - [Generate credentials to connect to an SMTP endpoint](ses_generate_smtp_credentials.py) - [Verify an email identity and send messages](ses_email.py) @@ -139,6 +140,18 @@ This example shows you how to build an app that uses Amazon Rekognition to detec +#### Detect people and objects in a video + +This example shows you how to detect people and objects in a video with Amazon Rekognition. + + + + + + + + + #### Generate credentials to connect to an SMTP endpoint This example shows you how to generate credentials to connect to an Amazon SES SMTP endpoint. @@ -205,4 +218,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/sns/README.md b/python/example_code/sns/README.md index 69a60fc45e4..6946b1c7285 100644 --- a/python/example_code/sns/README.md +++ b/python/example_code/sns/README.md @@ -54,7 +54,10 @@ functions within the same service. - [Create an Amazon Textract explorer application](../../cross_service/textract_explorer) - [Create and publish to a FIFO topic](sns_fifo_topic.py) +- [Detect people and objects in a video](../../example_code/rekognition) - [Publish an SMS text message](sns_basics.py) +- [Use API Gateway to invoke a Lambda function](../../example_code/lambda) +- [Use scheduled events to invoke a Lambda function](../../example_code/lambda) @@ -105,6 +108,18 @@ python sns_fifo_topic.py +#### Detect people and objects in a video + +This example shows you how to detect people and objects in a video with Amazon Rekognition. + + + + + + + + + #### Publish an SMS text message This example shows you how to publish SMS messages using Amazon SNS. 
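
The core of that example is a single Publish call addressed to a phone number rather than a topic. A minimal Boto3 sketch follows; the phone number is a placeholder, and the SMS type attribute is optional.

```
import boto3

sns = boto3.client("sns")

# Publish directly to a phone number (placeholder, E.164 format).
response = sns.publish(
    PhoneNumber="+15555550100",
    Message="Hello from Amazon SNS!",
    MessageAttributes={
        "AWS.SNS.SMS.SMSType": {
            "DataType": "String",
            "StringValue": "Transactional",
        }
    },
)
print(f"Message ID: {response['MessageId']}")
```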
@@ -123,6 +138,30 @@ python sns_basics.py +#### Use API Gateway to invoke a Lambda function + +This example shows you how to create an AWS Lambda function invoked by Amazon API Gateway. + + + + + + + + + +#### Use scheduled events to invoke a Lambda function + +This example shows you how to create an AWS Lambda function invoked by an Amazon EventBridge scheduled event. + + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. @@ -149,4 +188,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/sqs/README.md b/python/example_code/sqs/README.md index 4bfc84d168e..a208669c236 100644 --- a/python/example_code/sqs/README.md +++ b/python/example_code/sqs/README.md @@ -56,6 +56,7 @@ functions within the same service. - [Create a messenger application](../../cross_service/stepfunctions_messenger) - [Create an Amazon Textract explorer application](../../cross_service/textract_explorer) - [Create and publish to a FIFO topic](../sns/sns_fifo_topic.py) +- [Detect people and objects in a video](../../example_code/rekognition) - [Send and receive batches of messages](message_wrapper.py) @@ -114,6 +115,18 @@ python ../sns/sns_fifo_topic.py +#### Detect people and objects in a video + +This example shows you how to detect people and objects in a video with Amazon Rekognition. + + + + + + + + + #### Send and receive batches of messages This example shows you how to do the following: @@ -162,4 +175,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/example_code/ssm/README.md b/python/example_code/ssm/README.md index cfa07dd0475..588e3a7059a 100644 --- a/python/example_code/ssm/README.md +++ b/python/example_code/ssm/README.md @@ -84,8 +84,15 @@ python hello.py #### Learn the basics -This example shows you how to work with Systems Manager maintenance windows, documents, and OpsItems. +This example shows you how to do the following: +- Create a maintenance window. +- Modify the maintenance window schedule. +- Create a document. +- Send a command to a specified EC2 instance. +- Create an OpsItem. +- Update and resolve the OpsItem. +- Delete the maintenance window, OpsItem, and document. @@ -127,4 +134,4 @@ in the `python` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
-SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/python/test_tools/bedrock_agent_runtime_stubber.py b/python/test_tools/bedrock_agent_runtime_stubber.py index 01baec209d8..cc62e9230f9 100644 --- a/python/test_tools/bedrock_agent_runtime_stubber.py +++ b/python/test_tools/bedrock_agent_runtime_stubber.py @@ -31,3 +31,7 @@ def stub_invoke_agent(self, expected_params, response, error_code=None): self._stub_bifurcator( "invoke_agent", expected_params, response, error_code=error_code ) + def stub_invoke_flow(self, expected_params, response, error_code=None): + self._stub_bifurcator( + "invoke_flow", expected_params, response, error_code=error_code + ) \ No newline at end of file diff --git a/resources/cdk/aurora_serverless_app/package-lock.json b/resources/cdk/aurora_serverless_app/package-lock.json index ac787aebbf7..1431a0813a1 100644 --- a/resources/cdk/aurora_serverless_app/package-lock.json +++ b/resources/cdk/aurora_serverless_app/package-lock.json @@ -9,7 +9,6 @@ "version": "0.1.0", "dependencies": { "aws-cdk": "^2.115.0", - "aws-cdk-lib": "^2.132.1", "constructs": "^10.0.38", "source-map-support": "^0.5.21" }, @@ -19,7 +18,7 @@ "devDependencies": { "@types/jest": "^27.4.0", "@types/node": "^17.0.10", - "aws-cdk-lib": "^2.132.1", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.0.38", "jest": "^27.4.7", "ts-jest": "^27.1.3", @@ -45,22 +44,61 @@ } }, "node_modules/@aws-cdk/asset-awscli-v1": { - "version": "2.2.202", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.202.tgz", - "integrity": "sha512-JqlF0D4+EVugnG5dAsNZMqhu3HW7ehOXm5SDMxMbXNDMdsF0pxtQKNHRl52z1U9igsHmaFpUgSGjbhAJ+0JONg==", - "dev": true + "version": "2.2.221", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-awscli-v1/-/asset-awscli-v1-2.2.221.tgz", + "integrity": "sha512-+Vu2cMvgtkaHwNezrTVng4+FAMAWKJTkC/2ZQlgkbY05k0lHHK/2eWKqBhTeA7EpxVrx9uFN7GdBFz3mcThpxg==", + "dev": true, + "license": "Apache-2.0" }, "node_modules/@aws-cdk/asset-kubectl-v20": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.2.tgz", - "integrity": "sha512-3M2tELJOxQv0apCIiuKQ4pAbncz9GuLwnKFqxifWfe77wuMxyTRPmxssYHs42ePqzap1LT6GDcPygGs+hHstLg==", - "dev": true + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-kubectl-v20/-/asset-kubectl-v20-2.1.3.tgz", + "integrity": "sha512-cDG1w3ieM6eOT9mTefRuTypk95+oyD7P5X/wRltwmYxU7nZc3+076YEVS6vrjDKr3ADYbfn0lDKpfB1FBtO9CQ==", + "dev": true, + "license": "Apache-2.0" }, "node_modules/@aws-cdk/asset-node-proxy-agent-v6": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.0.1.tgz", - "integrity": "sha512-DDt4SLdLOwWCjGtltH4VCST7hpOI5DzieuhGZsBpZ+AgJdSI2GCjklCXm0GCTwJG/SolkL5dtQXyUKgg9luBDg==", - "dev": true + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@aws-cdk/asset-node-proxy-agent-v6/-/asset-node-proxy-agent-v6-2.1.0.tgz", + "integrity": "sha512-7bY3J8GCVxLupn/kNmpPc5VJz8grx+4RKfnnJiO1LG+uxkZfANZG3RMHhE+qQxxwkyQ9/MfPtTpf748UhR425A==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@aws-cdk/cloud-assembly-schema": { + "version": "39.2.8", + "resolved": "https://registry.npmjs.org/@aws-cdk/cloud-assembly-schema/-/cloud-assembly-schema-39.2.8.tgz", + "integrity": "sha512-VkppFgLbm5M1/K8S+BimI/0COq+E9fCDcdDyAe4gFizHNZTALZA4sMds2kug1rtPFKCcWAexrycs2D4iQHcRCw==", + "bundleDependencies": [ + "jsonschema", + 
"semver" + ], + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "jsonschema": "~1.4.1", + "semver": "^7.6.3" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/jsonschema": { + "version": "1.4.1", + "dev": true, + "inBundle": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/@aws-cdk/cloud-assembly-schema/node_modules/semver": { + "version": "7.6.3", + "dev": true, + "inBundle": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } }, "node_modules/@babel/code-frame": { "version": "7.23.5", @@ -1328,9 +1366,9 @@ } }, "node_modules/aws-cdk-lib": { - "version": "2.132.1", - "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.132.1.tgz", - "integrity": "sha512-VheC7WcvmxiteNaZPucS9J9haGQZwbUtwNiNqsbTaEiru6ETUhf/yIOIamLto1kOKEPxCw2bfLkgYrWoCzwOpw==", + "version": "2.177.0", + "resolved": "https://registry.npmjs.org/aws-cdk-lib/-/aws-cdk-lib-2.177.0.tgz", + "integrity": "sha512-nTnHAwjZaPJ5gfJjtzE/MyK6q0a66nWthoJl7l8srucRb+I30dczhbbXor6QCdVpJaTRAEliMOMq23aglsAQbg==", "bundleDependencies": [ "@balena/dockerignore", "case", @@ -1345,20 +1383,22 @@ "mime-types" ], "dev": true, + "license": "Apache-2.0", "dependencies": { - "@aws-cdk/asset-awscli-v1": "^2.2.202", - "@aws-cdk/asset-kubectl-v20": "^2.1.2", - "@aws-cdk/asset-node-proxy-agent-v6": "^2.0.1", + "@aws-cdk/asset-awscli-v1": "^2.2.208", + "@aws-cdk/asset-kubectl-v20": "^2.1.3", + "@aws-cdk/asset-node-proxy-agent-v6": "^2.1.0", + "@aws-cdk/cloud-assembly-schema": "^39.2.0", "@balena/dockerignore": "^1.0.2", "case": "1.6.3", "fs-extra": "^11.2.0", - "ignore": "^5.3.1", + "ignore": "^5.3.2", "jsonschema": "^1.4.1", "mime-types": "^2.1.35", "minimatch": "^3.1.2", "punycode": "^2.3.1", - "semver": "^7.6.0", - "table": "^6.8.1", + "semver": "^7.6.3", + "table": "^6.8.2", "yaml": "1.10.2" }, "engines": { @@ -1375,15 +1415,15 @@ "license": "Apache-2.0" }, "node_modules/aws-cdk-lib/node_modules/ajv": { - "version": "8.12.0", + "version": "8.17.1", "dev": true, "inBundle": true, "license": "MIT", "dependencies": { - "fast-deep-equal": "^3.1.1", + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" + "require-from-string": "^2.0.2" }, "funding": { "type": "github", @@ -1484,6 +1524,12 @@ "inBundle": true, "license": "MIT" }, + "node_modules/aws-cdk-lib/node_modules/fast-uri": { + "version": "3.0.3", + "dev": true, + "inBundle": true, + "license": "BSD-3-Clause" + }, "node_modules/aws-cdk-lib/node_modules/fs-extra": { "version": "11.2.0", "dev": true, @@ -1505,7 +1551,7 @@ "license": "ISC" }, "node_modules/aws-cdk-lib/node_modules/ignore": { - "version": "5.3.1", + "version": "5.3.2", "dev": true, "inBundle": true, "license": "MIT", @@ -1555,18 +1601,6 @@ "inBundle": true, "license": "MIT" }, - "node_modules/aws-cdk-lib/node_modules/lru-cache": { - "version": "6.0.0", - "dev": true, - "inBundle": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, "node_modules/aws-cdk-lib/node_modules/mime-db": { "version": "1.52.0", "dev": true, @@ -1619,13 +1653,10 @@ } }, "node_modules/aws-cdk-lib/node_modules/semver": { - "version": "7.6.0", + "version": "7.6.3", "dev": true, "inBundle": true, "license": "ISC", - "dependencies": { - "lru-cache": "^6.0.0" - }, "bin": { "semver": "bin/semver.js" }, @@ -1677,7 +1708,7 @@ } }, "node_modules/aws-cdk-lib/node_modules/table": { - "version": 
"6.8.1", + "version": "6.8.2", "dev": true, "inBundle": true, "license": "BSD-3-Clause", @@ -1701,21 +1732,6 @@ "node": ">= 10.0.0" } }, - "node_modules/aws-cdk-lib/node_modules/uri-js": { - "version": "4.4.1", - "dev": true, - "inBundle": true, - "license": "BSD-2-Clause", - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/aws-cdk-lib/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "inBundle": true, - "license": "ISC" - }, "node_modules/aws-cdk-lib/node_modules/yaml": { "version": "1.10.2", "dev": true, diff --git a/resources/cdk/aurora_serverless_app/package.json b/resources/cdk/aurora_serverless_app/package.json index 6cfc9c08894..b33c40d89c0 100644 --- a/resources/cdk/aurora_serverless_app/package.json +++ b/resources/cdk/aurora_serverless_app/package.json @@ -17,7 +17,7 @@ "devDependencies": { "@types/jest": "^27.4.0", "@types/node": "^17.0.10", - "aws-cdk-lib": "^2.132.1", + "aws-cdk-lib": "^2.177.0", "constructs": "^10.0.38", "jest": "^27.4.7", "ts-jest": "^27.1.3", @@ -29,6 +29,6 @@ "aws-cdk-lib": "^2.80.0", "constructs": "^10.0.38", "source-map-support": "^0.5.21", - "aws-cdk-lib": "^2.132.1" + "aws-cdk-lib": "^2.177.0" } } diff --git a/resources/cdk/entityresolution_resources/README.md b/resources/cdk/entityresolution_resources/README.md new file mode 100644 index 00000000000..a686e715048 --- /dev/null +++ b/resources/cdk/entityresolution_resources/README.md @@ -0,0 +1,53 @@ +# AWS Entity Resolution scenario resources + +## Overview + +This AWS CDK Java application generates a AWS CloudFormation template. +The CloudFormation template creates the following resources for the AWS Entity Resolution scenario application: + +* An AWS IAM role that has permissions required to run this Scenario. +* An AWS Glue table that provides the input data for the entity resolution matching workflow. +* An Amazon S3 input bucket that is used by the AWS Glue table. +* An Amazon S3 output bucket that is used by the matching workflow to store results of the matching workflow. + +## ⚠️ Important + +* When the template is used by the AWS Entity Resolution scenario application, + the resources it creates might result in charges to your account. +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + +## Create a CloudFormation template + +To output a template that creates the CloudFormation stack, execute the following CDK CLI command from the +`resources/cdk/entityresolution_resources` working directory: +``` +cdk synth --yaml > ../../../javav2/example_code/entityresolution/src/main/resources/template.yaml +``` +The result of running this command puts the `template.yaml` file into the directory where +the scenario application can use it. + + +## Outputs generated +When the template is used and the stack is created by the AWS Entity Resolution scenario application, +the following outputs are generated and used in the application: +``` +EntityResolutionCdkStack.EntityResolutionArn = arn:aws:iam::XXXXX:role/EntityResolutionCdkStack-EntityResolutionRoleB51A51-TSzkkBfrkbfm +EntityResolutionCdkStack.GlueDataBucketName = glue-XXXXX3196d +EntityResolutionCdkStack.GlueTableArn = arn:aws:glue:us-east-1:XXXXX:table/entity_resolution_db/entity_resolution +``` + +## How stack-created resources are destroyed +AWS Entity Resolution scenario application destroys the resources created by the stack before it completes. 
+ + +## Additional information + +* [AWS CDK v2 Developer Guide](https://docs.aws.amazon.com/cdk/v2/guide/home.html) +* [AWS CLI User Guide for Version 2](https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-welcome.html) +* [AWS CloudFormation User Guide](https://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/Welcome.html) + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 diff --git a/resources/cdk/entityresolution_resources/cdk.json b/resources/cdk/entityresolution_resources/cdk.json new file mode 100644 index 00000000000..723b2f18b0c --- /dev/null +++ b/resources/cdk/entityresolution_resources/cdk.json @@ -0,0 +1,68 @@ +{ + "app": "mvn -e -q compile exec:java", + "watch": { + "include": [ + "**" + ], + "exclude": [ + "README.md", + "cdk*.json", + "target", + "pom.xml", + "src/test" + ] + }, + "context": { + "@aws-cdk/aws-lambda:recognizeLayerVersion": true, + "@aws-cdk/core:checkSecretUsage": true, + "@aws-cdk/core:target-partitions": [ + "aws", + "aws-cn" + ], + "@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true, + "@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true, + "@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true, + "@aws-cdk/aws-iam:minimizePolicies": true, + "@aws-cdk/core:validateSnapshotRemovalPolicy": true, + "@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true, + "@aws-cdk/aws-s3:createDefaultLoggingPolicy": true, + "@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true, + "@aws-cdk/aws-apigateway:disableCloudWatchRole": true, + "@aws-cdk/core:enablePartitionLiterals": true, + "@aws-cdk/aws-events:eventsTargetQueueSameAccount": true, + "@aws-cdk/aws-iam:standardizedServicePrincipals": true, + "@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, + "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, + "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, + "@aws-cdk/aws-route53-patters:useCertificate": true, + "@aws-cdk/customresources:installLatestAwsSdkDefault": false, + "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, + "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, + "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, + "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, + "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, + "@aws-cdk/aws-redshift:columnId": true, + "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, + "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, + "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, + "@aws-cdk/aws-kms:aliasNameRef": true, + "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, + "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, + "@aws-cdk/aws-efs:denyAnonymousAccess": true, + "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, + "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, + "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, + "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, + "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, + "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, + "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, + "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, + 
"@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true, + "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true, + "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true, + "@aws-cdk/aws-eks:nodegroupNameAttribute": true, + "@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true, + "@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true, + "@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false + } +} diff --git a/resources/cdk/entityresolution_resources/pom.xml b/resources/cdk/entityresolution_resources/pom.xml new file mode 100644 index 00000000000..56581afc3e7 --- /dev/null +++ b/resources/cdk/entityresolution_resources/pom.xml @@ -0,0 +1,60 @@ + + + 4.0.0 + + com.myorg + entity_resolution_cdk + 0.1 + + + UTF-8 + 2.135.0 + [10.0.0,11.0.0) + 5.7.1 + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + 17 + + + + + org.codehaus.mojo + exec-maven-plugin + 3.1.0 + + com.myorg.EntityResolutionCdkApp + + + + + + + + + software.amazon.awscdk + aws-cdk-lib + ${cdk.version} + + + + software.constructs + constructs + ${constructs.version} + + + + org.junit.jupiter + junit-jupiter + ${junit.version} + test + + + diff --git a/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkApp.java b/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkApp.java new file mode 100644 index 00000000000..ef25c7d2c34 --- /dev/null +++ b/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkApp.java @@ -0,0 +1,23 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +package com.myorg; + +import software.amazon.awscdk.App; +import software.amazon.awscdk.Environment; +import software.amazon.awscdk.StackProps; + +import java.util.Arrays; + +public class EntityResolutionCdkApp { + public static void main(final String[] args) { + App app = new App(); + + new EntityResolutionCdkStack(app, "EntityResolutionCdkStack", StackProps.builder() + // For more information, see https://docs.aws.amazon.com/cdk/latest/guide/environments.html + .build()); + + app.synth(); + } +} + diff --git a/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkStack.java b/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkStack.java new file mode 100644 index 00000000000..c8871872d69 --- /dev/null +++ b/resources/cdk/entityresolution_resources/src/main/java/com/myorg/EntityResolutionCdkStack.java @@ -0,0 +1,151 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 + +package com.myorg; + +import software.amazon.awscdk.CfnOutput; +import software.amazon.awscdk.CfnOutputProps; +import software.amazon.awscdk.RemovalPolicy; +import software.amazon.awscdk.Stack; +import software.amazon.awscdk.StackProps; +import software.amazon.awscdk.services.glue.CfnDatabase; +import software.amazon.awscdk.services.glue.CfnTable; +import software.amazon.awscdk.services.iam.ManagedPolicy; +import software.amazon.awscdk.services.iam.PolicyStatement; +import software.amazon.awscdk.services.iam.Role; +import software.amazon.awscdk.services.iam.ServicePrincipal; +import software.amazon.awscdk.services.s3.Bucket; +import software.constructs.Construct; + +import java.util.List; +import java.util.Map; +import java.util.UUID; + +public class EntityResolutionCdkStack extends Stack { + public EntityResolutionCdkStack(final Construct scope, final String id) { + this(scope, id, null); + } + + public EntityResolutionCdkStack(final Construct scope, final String id, final StackProps props) { + super(scope, id, props); + final String jsonGlueTableName = "jsongluetable"; + final String csvGlueTableName = "csvgluetable"; + + // 1. Create an S3 bucket for the Glue Data Table + String uniqueId = UUID.randomUUID().toString().replace("-", ""); // Remove dashes to ensure compatibility + + Bucket erBucket = Bucket.Builder.create(this, "ErBucket") + .bucketName("erbucket" + uniqueId) + .versioned(false) + .removalPolicy(RemovalPolicy.DESTROY) + .build(); + + // 2. Create a Glue database + CfnDatabase glueDatabase = CfnDatabase.Builder.create(this, "GlueDatabase") + .catalogId(this.getAccount()) + .databaseInput(CfnDatabase.DatabaseInputProperty.builder() + .name("entity_resolution_db") + .build()) + .build(); + + // 3. Create a Glue table referencing the S3 bucket + final CfnTable jsonErGlueTable = createGlueTable(jsonGlueTableName + , jsonGlueTableName + , glueDatabase.getRef() + , Map.of("id", "string", "name", "string", "email", "string") + , "s3://" + erBucket.getBucketName() + "/jsonData/" + , "org.openx.data.jsonserde.JsonSerDe"); + + // Ensure Glue Table is created after the Database + jsonErGlueTable.addDependency(glueDatabase); + + final CfnTable csvErGlueTable = createGlueTable(csvGlueTableName + , csvGlueTableName + , glueDatabase.getRef() + , Map.of("id", "string", "name", "string", "email", "string", "phone", "string") + , "s3://" + erBucket.getBucketName() + "/csvData/" + , "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe"); + // Ensure Glue Table is created after the Database + csvErGlueTable.addDependency(glueDatabase); + + // 4. 
Create an IAM Role for AWS Entity Resolution + Role entityResolutionRole = Role.Builder.create(this, "EntityResolutionRole") + .assumedBy(new ServicePrincipal("entityresolution.amazonaws.com")) // AWS Entity Resolution assumes this role + .managedPolicies(List.of( + ManagedPolicy.fromAwsManagedPolicyName("AmazonS3FullAccess"), + ManagedPolicy.fromAwsManagedPolicyName("AWSEntityResolutionConsoleFullAccess"), + ManagedPolicy.fromAwsManagedPolicyName("AWSGlueConsoleFullAccess"), + ManagedPolicy.fromAwsManagedPolicyName("service-role/AWSGlueServiceRole") + )) + .build(); + + new CfnOutput(this, "EntityResolutionRoleArn", CfnOutputProps.builder() + .value(entityResolutionRole.getRoleArn()) + .description("The ARN of the EntityResolution Role") + .build()); + + // Add custom permissions for Entity Resolution + entityResolutionRole.addToPolicy(PolicyStatement.Builder.create() + .actions(List.of( + "entityresolution:StartMatchingWorkflow", + "entityresolution:GetMatchingWorkflow" + )) + .resources(List.of("*")) // Adjust permissions if needed + .build()); + + // ------------------------ OUTPUTS -------------------------------------- + new CfnOutput(this, "JsonErGlueTableArn", CfnOutputProps.builder() + .value(createGlueTableArn(jsonErGlueTable, jsonGlueTableName)) + .description("The ARN of the Json Glue Table") + .build()); + + new CfnOutput(this, "CsvErGlueTableArn", CfnOutputProps.builder() + .value(createGlueTableArn(csvErGlueTable, csvGlueTableName)) + .description("The ARN of the CSV Glue Table") + .build()); + + new CfnOutput(this, "GlueDataBucketName", CfnOutputProps.builder() + .value(erBucket.getBucketName()) // Outputs the bucket name + .description("The name of the Glue Data Bucket") + .build()); + } + + CfnTable createGlueTable(String id, String tableName, String databaseRef, Map schemaMap, String dataLocation, String serializationLib) { + return CfnTable.Builder.create(this, id) + .catalogId(this.getAccount()) + .databaseName(databaseRef) // Ensure Glue Table references the database correctly + .tableInput(CfnTable.TableInputProperty.builder() + .name(tableName) // Fixed table name reference + .tableType("EXTERNAL_TABLE") + .storageDescriptor(CfnTable.StorageDescriptorProperty.builder() + .columns(createColumns(schemaMap)) + .location(dataLocation) // Append subpath for data + .inputFormat("org.apache.hadoop.mapred.TextInputFormat") + .outputFormat("org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat") + .serdeInfo(CfnTable.SerdeInfoProperty.builder() + .serializationLibrary(serializationLib) // Set JSON SerDe + .parameters(Map.of("serialization.format", "1")) // Optional: Set the format for JSON + .build()) + .build()) + .build()) + .build(); + } + + List createColumns(Map schemaMap) { + return schemaMap.entrySet().stream() + .map(entry -> CfnTable.ColumnProperty.builder() + .name(entry.getKey()) + .type(entry.getValue()) + .build()) + .toList(); + } + + String createGlueTableArn(CfnTable glueTable, String glueTableName) { + return String.format("arn:aws:glue:%s:%s:table/%s/%s" + , this.getRegion() + , this.getAccount() + , glueTable.getDatabaseName() + , glueTableName + ); + } +} diff --git a/ruby/Gemfile b/ruby/Gemfile index 4cafb94cfc6..5dd220f262e 100644 --- a/ruby/Gemfile +++ b/ruby/Gemfile @@ -1,5 +1,5 @@ source 'https://rubygems.org' -ruby '3.1.2' +ruby '3.3.7' gem 'aws-sdk' gem 'cli-ui' diff --git a/ruby/Gemfile.lock b/ruby/Gemfile.lock index a69e07c85df..c6e4d1bcb82 100644 --- a/ruby/Gemfile.lock +++ b/ruby/Gemfile.lock @@ -1399,13 +1399,15 @@ GEM base64 (0.2.0) 
cli-ui (2.2.3) concurrent-ruby (1.2.2) - date (3.3.3) - diff-lcs (1.5.0) + date (3.4.1) + diff-lcs (1.6.0) i18n (1.14.1) concurrent-ruby (~> 1.0) jmespath (1.6.2) json (2.6.3) - logger (1.6.1) + language_server-protocol (3.17.0.4) + lint_roller (1.1.0) + logger (1.6.6) mail (2.8.1) mini_mime (>= 0.1.1) net-imap @@ -1417,12 +1419,12 @@ GEM multi_json (1.15.0) mustermann (3.0.3) ruby2_keywords (~> 0.0.1) - net-imap (0.3.4) + net-imap (0.3.8) date net-protocol net-pop (0.1.2) net-protocol - net-protocol (0.2.1) + net-protocol (0.2.2) timeout net-smtp (0.3.3) net-protocol @@ -1431,50 +1433,53 @@ GEM time uri openssl (3.1.0) - parallel (1.23.0) - parser (3.2.2.1) + parallel (1.26.3) + parser (3.3.7.1) ast (~> 2.4.1) + racc pp (0.4.0) prettyprint prettyprint (0.1.1) - rack (3.1.8) - rack-protection (4.1.0) + racc (1.8.1) + rack (3.1.11) + rack-protection (4.1.1) base64 (>= 0.1.0) logger (>= 1.6.0) rack (>= 3.0.0, < 4) - rack-session (2.0.0) + rack-session (2.1.0) + base64 (>= 0.1.0) rack (>= 3.0.0) rack-test (2.1.0) rack (>= 1.3) rainbow (3.1.1) rake (13.0.6) - regexp_parser (2.8.0) - rexml (3.3.9) - rspec (3.12.0) - rspec-core (~> 3.12.0) - rspec-expectations (~> 3.12.0) - rspec-mocks (~> 3.12.0) - rspec-core (3.12.2) - rspec-support (~> 3.12.0) - rspec-expectations (3.12.3) + regexp_parser (2.10.0) + rspec (3.13.0) + rspec-core (~> 3.13.0) + rspec-expectations (~> 3.13.0) + rspec-mocks (~> 3.13.0) + rspec-core (3.13.3) + rspec-support (~> 3.13.0) + rspec-expectations (3.13.3) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.12.0) - rspec-mocks (3.12.5) + rspec-support (~> 3.13.0) + rspec-mocks (3.13.2) diff-lcs (>= 1.2.0, < 2.0) - rspec-support (~> 3.12.0) - rspec-support (3.12.0) - rubocop (1.52.0) + rspec-support (~> 3.13.0) + rspec-support (3.13.2) + rubocop (1.72.2) json (~> 2.3) + language_server-protocol (~> 3.17.0.2) + lint_roller (~> 1.1.0) parallel (~> 1.10) - parser (>= 3.2.0.0) + parser (>= 3.3.0.2) rainbow (>= 2.2.2, < 4.0) - regexp_parser (>= 1.8, < 3.0) - rexml (>= 3.2.5, < 4.0) - rubocop-ast (>= 1.28.0, < 2.0) + regexp_parser (>= 2.9.3, < 3.0) + rubocop-ast (>= 1.38.0, < 2.0) ruby-progressbar (~> 1.7) - unicode-display_width (>= 2.4.0, < 3.0) - rubocop-ast (1.29.0) - parser (>= 3.2.1.0) + unicode-display_width (>= 2.4.0, < 4.0) + rubocop-ast (1.38.0) + parser (>= 3.3.1.0) rubocop-github (0.20.0) rubocop (>= 1.37) rubocop-performance (>= 1.15) @@ -1486,32 +1491,36 @@ GEM activesupport (>= 4.2.0) rack (>= 1.1) rubocop (>= 1.33.0, < 2.0) - rubocop-rake (0.6.0) - rubocop (~> 1.0) + rubocop-rake (0.7.1) + lint_roller (~> 1.1) + rubocop (>= 1.72.1) ruby-progressbar (1.13.0) ruby2_keywords (0.0.5) rubyzip (2.3.2) sequel (5.69.0) - sinatra (4.1.0) + sinatra (4.1.1) logger (>= 1.6.0) mustermann (~> 3.0) rack (>= 3.0.0, < 4) - rack-protection (= 4.1.0) + rack-protection (= 4.1.1) rack-session (>= 2.0.0, < 3) tilt (~> 2.0) stringio (3.0.7) - tilt (2.4.0) + tilt (2.6.0) time (0.2.2) date - timeout (0.3.2) + timeout (0.4.3) tzinfo (2.0.6) concurrent-ruby (~> 1.0) - unicode-display_width (2.4.2) - uri (0.12.2) + unicode-display_width (3.1.4) + unicode-emoji (~> 4.0, >= 4.0.4) + unicode-emoji (4.0.4) + uri (0.12.4) zip (2.0.2) PLATFORMS arm64-darwin-22 + x64-mingw-ucrt x86_64-linux DEPENDENCIES @@ -1537,7 +1546,7 @@ DEPENDENCIES zip RUBY VERSION - ruby 3.1.2p20 + ruby 3.3.7p123 BUNDLED WITH 2.3.7 diff --git a/ruby/example_code/dynamodb/README.md b/ruby/example_code/dynamodb/README.md index 4256dd73186..60304150f70 100644 --- a/ruby/example_code/dynamodb/README.md +++ 
b/ruby/example_code/dynamodb/README.md @@ -184,4 +184,4 @@ To learn more about the contributing process, see [CONTRIBUTING.md](../../../CON Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. -SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/ruby/example_code/dynamodb/scaffold.rb b/ruby/example_code/dynamodb/scaffold.rb index b36f241f080..748ce88353c 100644 --- a/ruby/example_code/dynamodb/scaffold.rb +++ b/ruby/example_code/dynamodb/scaffold.rb @@ -68,7 +68,7 @@ def create_table(table_name) { attribute_name: 'year', attribute_type: 'N' }, { attribute_name: 'title', attribute_type: 'S' } ], - provisioned_throughput: { read_capacity_units: 10, write_capacity_units: 10 } + billing_mode: 'PAY_PER_REQUEST' ) @dynamo_resource.client.wait_until(:table_exists, table_name: table_name) @table diff --git a/rustv1/cross_service/photo_asset_management/integration/tests/update_label.rs b/rustv1/cross_service/photo_asset_management/integration/tests/update_label.rs index 1fec6f2d329..664d541d8e4 100644 --- a/rustv1/cross_service/photo_asset_management/integration/tests/update_label.rs +++ b/rustv1/cross_service/photo_asset_management/integration/tests/update_label.rs @@ -2,7 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use std::collections::HashMap; -use aws_sdk_dynamodb::types::{AttributeDefinition, KeySchemaElement, ProvisionedThroughput}; +use aws_sdk_dynamodb::types::{AttributeDefinition, KeySchemaElement, BillingMode}; use aws_sdk_rekognition::types::Label; use photo_asset_management::{ common::{init_tracing_subscriber, Common}, @@ -27,13 +27,8 @@ async fn create_table(common: &Common) -> Result<(), impl std::error::Error> { .attribute_definitions( AttributeDefinition::builder() .attribute_name("Label") .attribute_type(aws_sdk_dynamodb::types::ScalarAttributeType::S) .build(), ) - .provisioned_throughput( - ProvisionedThroughput::builder() - .write_capacity_units(1) - .read_capacity_units(1) - .build(), - ) + .billing_mode(BillingMode::PayPerRequest) .send() diff --git a/rustv1/examples/bedrock-runtime/README.md b/rustv1/examples/bedrock-runtime/README.md index ae64bcc7542..abb0df99ce4 100644 --- a/rustv1/examples/bedrock-runtime/README.md +++ b/rustv1/examples/bedrock-runtime/README.md @@ -28,6 +28,13 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `rustv + +### Scenarios + +Code examples that show you how to accomplish a specific task by calling multiple +functions within the same service. + +- [Tool use with the Converse API](src/bin/tool-use.rs) + ### Anthropic Claude - [Converse](src/bin/converse.rs#L43) @@ -48,6 +55,18 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `rustv + +#### Tool use with the Converse API + +This example shows you how to build a typical interaction between an application, a generative AI model, and connected tools or APIs to mediate interactions between the AI and the outside world. It uses the example of connecting an external weather API to the AI model so it can provide real-time weather information based on user input. + + + + + + + + + ### Tests ⚠ Running tests might result in charges to your AWS account. @@ -74,4 +93,4 @@ in the `rustv1` folder. Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
-SPDX-License-Identifier: Apache-2.0 \ No newline at end of file +SPDX-License-Identifier: Apache-2.0 diff --git a/rustv1/examples/dynamodb/src/bin/crud.rs b/rustv1/examples/dynamodb/src/bin/crud.rs index 9a5e4b1ae83..1f515945ec7 100644 --- a/rustv1/examples/dynamodb/src/bin/crud.rs +++ b/rustv1/examples/dynamodb/src/bin/crud.rs @@ -8,7 +8,7 @@ use aws_sdk_dynamodb::error::SdkError; use aws_sdk_dynamodb::operation::create_table::CreateTableError; use aws_sdk_dynamodb::operation::put_item::PutItemError; use aws_sdk_dynamodb::types::{ - AttributeDefinition, AttributeValue, KeySchemaElement, KeyType, ProvisionedThroughput, + AttributeDefinition, AttributeValue, BillingMode, KeySchemaElement, KeyType, ScalarAttributeType, Select, TableStatus, }; use aws_sdk_dynamodb::{config::Region, meta::PKG_VERSION, Client, Error}; @@ -63,18 +63,12 @@ async fn make_table( .build() .expect("creating KeySchemaElement"); - let pt = ProvisionedThroughput::builder() - .read_capacity_units(10) - .write_capacity_units(5) - .build() - .expect("creating ProvisionedThroughput"); - match client .create_table() .table_name(table) .key_schema(ks) .attribute_definitions(ad) - .provisioned_throughput(pt) + .billing_mode(BillingMode::PayPerRequest) .send() .await { diff --git a/rustv1/examples/dynamodb/src/bin/dynamodb-helloworld.rs b/rustv1/examples/dynamodb/src/bin/dynamodb-helloworld.rs index 07b62c2d12e..999bb036507 100644 --- a/rustv1/examples/dynamodb/src/bin/dynamodb-helloworld.rs +++ b/rustv1/examples/dynamodb/src/bin/dynamodb-helloworld.rs @@ -5,7 +5,7 @@ use aws_config::meta::region::RegionProviderChain; use aws_sdk_dynamodb::types::{ - AttributeDefinition, KeySchemaElement, KeyType, ProvisionedThroughput, ScalarAttributeType, + AttributeDefinition, BillingMode, KeySchemaElement, KeyType, ScalarAttributeType, }; use aws_sdk_dynamodb::{config::Region, meta::PKG_VERSION, Client, Error}; use clap::Parser; @@ -47,18 +47,12 @@ async fn create_table(client: &Client) -> Result<(), Error> { .build() .expect("creating AttributeDefinition"); - let pt = ProvisionedThroughput::builder() - .write_capacity_units(10) - .read_capacity_units(10) - .build() - .expect("creating ProvisionedThroughput"); - let new_table = client .create_table() .table_name("test-table") .key_schema(ks) .attribute_definitions(ad) - .provisioned_throughput(pt) + .billing_mode(BillingMode::PayPerRequest) .send() .await?; println!( diff --git a/rustv1/examples/dynamodb/src/bin/partiql.rs b/rustv1/examples/dynamodb/src/bin/partiql.rs index 1a1305d085b..e4e67486f97 100644 --- a/rustv1/examples/dynamodb/src/bin/partiql.rs +++ b/rustv1/examples/dynamodb/src/bin/partiql.rs @@ -8,7 +8,7 @@ use aws_sdk_dynamodb::error::SdkError; use aws_sdk_dynamodb::operation::create_table::CreateTableError; use aws_sdk_dynamodb::operation::execute_statement::ExecuteStatementError; use aws_sdk_dynamodb::types::{ - AttributeDefinition, AttributeValue, KeySchemaElement, KeyType, ProvisionedThroughput, + AttributeDefinition, AttributeValue, BillingMode, KeySchemaElement, KeyType, ScalarAttributeType, TableStatus, }; use aws_sdk_dynamodb::{config::Region, meta::PKG_VERSION, Client, Error}; @@ -44,7 +44,7 @@ fn random_string(n: usize) -> String { .collect() } -/// Create a new table. +/// Create a new on-demand table. 
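The Ruby and Rust changes in this part of the change set all make the same switch: `CreateTable` calls drop their provisioned read/write capacity and use on-demand (`PAY_PER_REQUEST`) billing instead. For readers following along in another SDK, here is a minimal sketch of the equivalent request in the AWS SDK for Java 2.x; the table name and key attribute are illustrative placeholders, not values taken from these examples.

```java
import software.amazon.awssdk.services.dynamodb.DynamoDbClient;
import software.amazon.awssdk.services.dynamodb.model.AttributeDefinition;
import software.amazon.awssdk.services.dynamodb.model.BillingMode;
import software.amazon.awssdk.services.dynamodb.model.CreateTableRequest;
import software.amazon.awssdk.services.dynamodb.model.KeySchemaElement;
import software.amazon.awssdk.services.dynamodb.model.KeyType;
import software.amazon.awssdk.services.dynamodb.model.ScalarAttributeType;

public class CreateOnDemandTable {
    public static void main(String[] args) {
        try (DynamoDbClient dynamoDb = DynamoDbClient.create()) {
            // With on-demand (PAY_PER_REQUEST) billing there is no
            // provisioned throughput setting to build or tune.
            CreateTableRequest request = CreateTableRequest.builder()
                    .tableName("example-table") // placeholder table name
                    .keySchema(KeySchemaElement.builder()
                            .attributeName("id")
                            .keyType(KeyType.HASH)
                            .build())
                    .attributeDefinitions(AttributeDefinition.builder()
                            .attributeName("id")
                            .attributeType(ScalarAttributeType.S)
                            .build())
                    .billingMode(BillingMode.PAY_PER_REQUEST)
                    .build();

            dynamoDb.createTable(request);
        }
    }
}
```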
// snippet-start:[dynamodb.rust.partiql-make_table] async fn make_table( client: &Client, @@ -63,18 +63,12 @@ async fn make_table( .build() .expect("creating KeySchemaElement"); - let pt = ProvisionedThroughput::builder() - .read_capacity_units(10) - .write_capacity_units(5) - .build() - .expect("creating ProvisionedThroughput"); - match client .create_table() .table_name(table) .key_schema(ks) .attribute_definitions(ad) - .provisioned_throughput(pt) + .billing_mode(BillingMode::PayPerRequest) .send() .await { diff --git a/rustv1/examples/dynamodb/src/scenario/create.rs b/rustv1/examples/dynamodb/src/scenario/create.rs index ea87771e641..ec127a1538c 100644 --- a/rustv1/examples/dynamodb/src/scenario/create.rs +++ b/rustv1/examples/dynamodb/src/scenario/create.rs @@ -4,11 +4,11 @@ use crate::scenario::error::Error; use aws_sdk_dynamodb::operation::create_table::CreateTableOutput; use aws_sdk_dynamodb::types::{ - AttributeDefinition, KeySchemaElement, KeyType, ProvisionedThroughput, ScalarAttributeType, + AttributeDefinition, BillingMode, KeySchemaElement, KeyType, ScalarAttributeType, }; use aws_sdk_dynamodb::Client; -// Create a table. +// Create an on-demand table. // snippet-start:[dynamodb.rust.create-table] pub async fn create_table( client: &Client, @@ -30,18 +30,12 @@ pub async fn create_table( .build() .map_err(Error::BuildError)?; - let pt = ProvisionedThroughput::builder() - .read_capacity_units(10) - .write_capacity_units(5) - .build() - .map_err(Error::BuildError)?; - let create_table_response = client .create_table() .table_name(table_name) .key_schema(ks) .attribute_definitions(ad) - .provisioned_throughput(pt) + .billing_mode(BillingMode::PayPerRequest) .send() .await; diff --git a/rustv1/examples/dynamodb/src/scenario/movies/startup.rs b/rustv1/examples/dynamodb/src/scenario/movies/startup.rs index 646c5bf4a24..246595c8026 100644 --- a/rustv1/examples/dynamodb/src/scenario/movies/startup.rs +++ b/rustv1/examples/dynamodb/src/scenario/movies/startup.rs @@ -5,8 +5,8 @@ use crate::scenario::error::Error; use aws_sdk_dynamodb::{ operation::create_table::builders::CreateTableFluentBuilder, types::{ - AttributeDefinition, KeySchemaElement, KeyType, ProvisionedThroughput, ScalarAttributeType, - TableStatus, WriteRequest, + AttributeDefinition, KeySchemaElement, KeyType, ScalarAttributeType, TableStatus, + WriteRequest, }, Client, }; @@ -14,8 +14,6 @@ use futures::future::join_all; use std::{collections::HashMap, time::Duration}; use tracing::{debug, info, trace}; -const CAPACITY: i64 = 10; - #[tracing::instrument(level = "trace")] pub async fn initialize(client: &Client, table_name: &str) -> Result<(), Error> { info!("Initializing Movies DynamoDB in {table_name}"); @@ -24,7 +22,7 @@ pub async fn initialize(client: &Client, table_name: &str) -> Result<(), Error> info!("Found existing table {table_name}"); } else { info!("Table does not exist, creating {table_name}"); - create_table(client, table_name, "year", "title", CAPACITY)? + create_table(client, table_name, "year", "title")? 
.send() .await?; await_table(client, table_name).await?; @@ -55,9 +53,8 @@ pub fn create_table( table_name: &str, primary_key: &str, sort_key: &str, - capacity: i64, ) -> Result<CreateTableFluentBuilder, Error> { - info!("Creating table: {table_name} with capacity {capacity} and key structure {primary_key}:{sort_key}"); + info!("Creating table: {table_name} with key structure {primary_key}:{sort_key}"); Ok(client .create_table() .table_name(table_name) @@ -89,13 +86,7 @@ .build() .expect("Failed to build attribute definition"), ) - .provisioned_throughput( - ProvisionedThroughput::builder() - .read_capacity_units(capacity) - .write_capacity_units(capacity) - .build() - .expect("Failed to specify ProvisionedThroughput"), - )) + .billing_mode(aws_sdk_dynamodb::types::BillingMode::PayPerRequest)) } // snippet-end:[dynamodb.rust.movies-create_table_request] diff --git a/scenarios/basics/entity_resolution/README.md b/scenarios/basics/entity_resolution/README.md new file mode 100644 index 00000000000..14679cbcc52 --- /dev/null +++ b/scenarios/basics/entity_resolution/README.md @@ -0,0 +1,53 @@ +# AWS Entity Resolution Program + +## Overview +This AWS Entity Resolution Basics scenario demonstrates how to interact with the AWS Entity Resolution service using an AWS SDK. The application shows how to use AWS Entity Resolution to integrate and deduplicate data from multiple sources using machine learning-based matching. The program walks through setting up AWS resources, uploading structured data, defining schema mappings, creating a matching workflow, and running a matching job. + + +**Note:** See the [specification document](SPECIFICATION.md) for a complete list of operations. + +## Features + +1. Uses AWS CloudFormation to create necessary resources: + +- AWS Glue Data Catalog table + +- AWS IAM role + +- AWS S3 bucket + +- AWS Entity Resolution Schema + +2. Uploads sample JSON and CSV data to S3 + +3. Creates schema mappings for JSON and CSV datasets + +4. Creates and starts an Entity Resolution matching workflow + +5. Retrieves job details and schema mappings + +6. Lists available schema mappings + +7. Tags AWS resources for better organization + +8. Views the results of the workflow + +## Resources + +This Basics scenario requires an IAM role that has permissions to work with the AWS Entity Resolution service, +an AWS Glue database, and an S3 bucket. A CDK script is provided to create these resources. +See the resources [README](../../../resources/cdk/entityresolution_resources/README.md) file. + +## Implementations + +This scenario example will be implemented in the following languages: + +- Java +- Python +- Kotlin + +## Additional Reading + +- [AWS Entity Resolution Documentation](https://docs.aws.amazon.com/entityresolution/latest/userguide/what-is-service.html) + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 diff --git a/scenarios/basics/entity_resolution/SPECIFICATION.md b/scenarios/basics/entity_resolution/SPECIFICATION.md new file mode 100644 index 00000000000..1f0880688f4 --- /dev/null +++ b/scenarios/basics/entity_resolution/SPECIFICATION.md @@ -0,0 +1,448 @@ +# Specification for the AWS Entity Resolution Service Scenario + +## Overview + +This SDK Basics scenario demonstrates how to interact with AWS Entity Resolution +using an AWS SDK. It covers tasks such as creating a schema +mapping, creating a matching workflow, starting a workflow, and so on. Finally, +this scenario shows how to clean up resources.
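To make the sections that follow concrete, the sketch below shows the shape of two of the calls the Basics flow makes (starting a matching job and reading its status) with the synchronous `EntityResolutionClient` from the AWS SDK for Java 2.x. The workflow name is a placeholder, and the request field names are assumptions based on the service API; verify them against the SDK model before reusing them.

```java
import software.amazon.awssdk.services.entityresolution.EntityResolutionClient;
import software.amazon.awssdk.services.entityresolution.model.ConflictException;
import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobRequest;
import software.amazon.awssdk.services.entityresolution.model.GetMatchingJobResponse;
import software.amazon.awssdk.services.entityresolution.model.StartMatchingJobRequest;

public class EntityResolutionBasicsSketch {
    public static void main(String[] args) {
        String workflowName = "workflow-example"; // placeholder workflow name

        try (EntityResolutionClient client = EntityResolutionClient.create()) {
            try {
                // Start the matching job for an existing workflow (step 3 in the flow below).
                String jobId = client.startMatchingJob(StartMatchingJobRequest.builder()
                                .workflowName(workflowName)
                                .build())
                        .jobId();

                // Read back the job details and status (step 4 in the flow below).
                GetMatchingJobResponse job = client.getMatchingJob(GetMatchingJobRequest.builder()
                        .workflowName(workflowName)
                        .jobId(jobId)
                        .build());
                System.out.println("Job " + jobId + " status: " + job.statusAsString());
            } catch (ConflictException e) {
                // Per this specification, a ConflictException means a job is already running.
                System.err.println(e.getMessage());
            }
        }
    }
}
```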
+ +## Resources + +This Basics scenario requires an IAM role that has permissions to work with the +AWS Entity Resolution service, an AWS Glue database and a table, and two S3 +buckets. +A [CDK script](../../../resources/cdk/entityresolution_resources/README.md +) is provided to create these resources. + +## Hello AWS Entity Resolution + +This program is intended to help users who are not familiar with the AWS Entity Resolution +service get up and running quickly. The program uses a +`listMatchingWorkflowsPaginator` to demonstrate how you can read through +workflow information. + +## Basics Scenario Program Flow + +The AWS Entity Resolution Basics scenario executes the following operations. + +1. **Create a schema mapping**: + - Description: Creates a schema mapping by invoking the + `createSchemaMapping` method. + - Exception Handling: Check to see if a `ConflictException` is thrown, which + indicates that the schema mapping already exists. If the exception is + thrown, display the information and end the program. + +2. **Create a Matching Workflow**: + - Description: Creates a new matching workflow that defines how entities + should be resolved and matched. The method `createMatchingWorkflow` is + called. + - Exception Handling: Check to see if a `ConflictException` is thrown, which + indicates that the matching workflow already exists. Also check to see if a `ValidationException` is thrown. If so, display the message and end the program. + +3. **Start Matching Workflow**: + - Description: Initiates a matching workflow by calling the + `startMatchingJob` method to process entity resolution based on predefined + configurations. + - Exception Handling: Check to see if a `ConflictException` is thrown, + which indicates that the matching workflow job is already running. If the + exception is thrown, display the message and end the program. + +4. **Get Workflow Job Details**: + - Description: Retrieves details about a specific matching workflow job by + calling the `getMatchingJob` method. + - Exception Handling: Check to see if a `ResourceNotFoundException` is + thrown, which indicates that the workflow cannot be found. If the + exception is thrown, display the message and end the program. + +5. **Get Schema Mapping**: + - Description: Returns the `SchemaMapping` of a given name by calling the + `getSchemaMapping` method. + - Exception Handling: Check to see if a `ResourceNotFoundException` is + thrown. If so, display the message and end the program. + +6. **List Matching Workflows**: + - Description: Lists all matching workflows created within the account by + calling the `listMatchingWorkflows` method. + - Exception Handling: Check to see if a `CompletionException` is thrown. If + so, display the message and end the program. + +7. **Tag Resource**: + - Description: Adds tags associated with an AWS Entity Resolution resource + by calling the `tagResource` method. + - Exception Handling: Check to see if a `ResourceNotFoundException` is + thrown. If so, display the message and end the program. + +8. **View the results of the AWS Entity Resolution Workflow**: + - Description: View the workflow results by calling the + `getMatchingJob` method. + - Exception Handling: Check to see if a `ResourceNotFoundException` is thrown. If + so, display the message and end the program. + +9. **Delete the AWS resources**: + - Description: Delete the AWS resources, including the workflow and schema mappings, by calling the + `deleteMatchingWorkflow` and `deleteSchemaMapping` methods.
+ - Exception Handling: Check to see if an `ResourceNotFoundException` is thrown. If + so, display the message and end the program. + - Finally delete the CloudFormation Stack by calling these method: + - CloudFormationHelper.emptyS3Bucket(glueBucketName); + - CloudFormationHelper.destroyCloudFormationStack + +### Program execution + +The following shows the output of the AWS Entity Resolution Basics scenario in +the console. + +``` +Welcome to the AWS Entity Resolution Scenario. +AWS Entity Resolution is a fully-managed machine learning service provided by +Amazon Web Services (AWS) that helps organizations extract, link, and +organize information from multiple data sources. It leverages natural +language processing and deep learning models to identify and resolve +entities, such as people, places, organizations, and products, +across structured and unstructured data. + +With Entity Resolution, customers can build robust data integration +pipelines to combine and reconcile data from multiple systems, databases, +and documents. The service can handle ambiguous, incomplete, or conflicting +information, and provide a unified view of entities and their relationships. +This can be particularly valuable in applications such as customer 360, +fraud detection, supply chain management, and knowledge management, where +accurate entity identification is crucial. + +The `EntityResolutionAsyncClient` interface in the AWS SDK for Java 2.x +provides a set of methods to programmatically interact with the AWS Entity +Resolution service. This allows developers to automate the entity extraction, +linking, and deduplication process as part of their data processing workflows. +With Entity Resolution, organizations can unlock the value of their data, +improve decision-making, and enhance customer experiences by having a reliable, +comprehensive view of their key entities. + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +To prepare the AWS resources needed for this scenario application, the next step uploads +a CloudFormation template whose resulting stack creates the following resources: +- An AWS Glue Data Catalog table +- An AWS IAM role +- An AWS S3 bucket +- An AWS Entity Resolution Schema + +It can take a couple minutes for the Stack to finish creating the resources. + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +Generating resources... +Stack creation requested, ARN is arn:aws:cloudformation:us-east-1:814548047983:stack/EntityResolutionCdkStack/858988e0-f604-11ef-916b-0affc298c80f +Stack created successfully +-------------------------------------------------------------------------------- + +Enter 'c' followed by to continue: +c +Continuing with the program... + +Upload the following JSON objects to the erbucketf684533d2680435fa99d24b1bdaf5179 S3 bucket. +{"id":"1","name":"Jane Doe","email":"jane.doe@example.com"} +{"id":"2","name":"John Doe","email":"john.doe@example.com"} +{"id":"3","name":"Jorge Souza","email":"jorge_souza@example.com"} + +Upload the following CSV data to the erbucketf684533d2680435fa99d24b1bdaf5179 S3 bucket. 
+id,name,email,phone +1,Jane B.,Doe,jane.doe@example.com,555-876-9846 +2,John Doe Jr.,john.doe@example.com,555-654-3210 +3,María García,maría_garcia@company.com,555-567-1234 +4,Mary Major,mary_major@company.com,555-222-3333 + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +The JSON and CSV objects have been uploaded to the S3 bucket. + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +1. Create Schema Mapping +Entity Resolution schema mapping aligns and integrates data from +multiple sources by identifying and matching corresponding entities +like customers or products. It unifies schemas, resolves conflicts, +and uses machine learning to link related entities, enabling a +consolidated, accurate view for improved data quality and decision-making. + +In this example, the schema mapping lines up with the fields in the JSON and CSV objects. That is, +it contains these fields: id, name, and email. + +[jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2] schema mapping Created Successfully! +The JSON schema mapping name is jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2 +[csv-8d05576d-66bb-4fcf-a29c-8c3de57ce48c] schema mapping Created Successfully! +The CSV schema mapping name is csv-8d05576d-66bb-4fcf-a29c-8c3de57ce48c + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +2. Create an AWS Entity Resolution Workflow. +An Entity Resolution matching workflow identifies and links records +across datasets that represent the same real-world entity, such as +customers or products. Using techniques like schema mapping, +data profiling, and machine learning algorithms, +it evaluates attributes like names or emails to detect duplicates +or relationships, even with variations or inconsistencies. +The workflow outputs consolidated, de-duplicated data. + +We will use the machine learning-based matching technique. + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +Workflow created successfully. +The workflow ARN is: arn:aws:entityresolution:us-east-1:814548047983:matchingworkflow/workflow-39216b7f-f00b-4896-84ae-cd7edcfc7872 + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +3. Start the matching job of the workflow-39216b7f-f00b-4896-84ae-cd7edcfc7872 workflow. + +Enter 'c' followed by to continue: +c +Continuing with the program... + +Job ID: f25d2707729646a4af27874d991e22c5 +The matching job was successfully started. + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +4. While the matching job is running, let's look at other API methods. First, let's get details for job f25d2707729646a4af27874d991e22c5 + +Enter 'c' followed by to continue: +c +Continuing with the program... 
+ +Job status: RUNNING +Job details: GetMatchingJobResponse(JobId=f25d2707729646a4af27874d991e22c5, StartTime=2025-02-28T18:49:14.921Z, Status=RUNNING) +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +5. Get the schema mapping for the JSON data. + +Enter 'c' followed by to continue: +c +Continuing with the program... + +Attribute Name: id, Attribute Type: UNIQUE_ID +Attribute Name: name, Attribute Type: NAME +Attribute Name: email, Attribute Type: EMAIL_ADDRESS +Schema mapping ARN is arn:aws:entityresolution:us-east-1:814548047983:schemamapping/jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2 + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +6. List Schema Mappings. +Schema Mapping Name: csv-33f8e392-74e7-4a08-900a-652b94f86250 +Schema Mapping Name: csv-3b68e38b-1d5c-4836-bfc7-92ac7339e5c7 +Schema Mapping Name: csv-4f547deb-56c1-4923-9119-556bc43df08d +Schema Mapping Name: csv-6fe8bbc3-ebb5-4800-ab49-a89f75a87905 +Schema Mapping Name: csv-812ecad3-3175-49c3-93a5-d3175396d6e7 +Schema Mapping Name: csv-8d05576d-66bb-4fcf-a29c-8c3de57ce48c +Schema Mapping Name: csv-90a464e1-f050-422c-8f5f-0726541a5858 +Schema Mapping Name: csv-ebad3e3d-27be-4ed4-ae35-7401265e57bd +Schema Mapping Name: csv-f752d395-857b-4106-b2f2-85e1da5e3040 +Schema Mapping Name: jsonschema-363dc915-0540-406e-8d3f-4f1435e0b942 +Schema Mapping Name: jsonschema-5b1ad3e1-a840-4c4f-b791-5e9e1893fe7e +Schema Mapping Name: jsonschema-8623e0ec-bb8c-4fe2-a998-609eae08d84d +Schema Mapping Name: jsonschema-93d5fd04-f10e-4274-a181-489bea7b92db +Schema Mapping Name: jsonschema-b1653c13-ce77-471d-a3d5-ae4877216a74 +Schema Mapping Name: jsonschema-c09b3414-384c-4e3d-90c8-61e48abde04d +Schema Mapping Name: jsonschema-d9a6edc0-a9bd-4553-bb71-fbf0d6064ef9 +Schema Mapping Name: jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2 +Schema Mapping Name: jsonschema-f0a259e0-f4e5-493a-bfd5-32740d2fa24d +Schema Mapping Name: schema2135 +Schema Mapping Name: schema435 +Schema Mapping Name: schema455 +Schema Mapping Name: schema456 +Schema Mapping Name: schema4648 +Schema Mapping Name: schema4720 +Schema Mapping Name: schema4848 +Schema Mapping Name: schema6758 +Schema Mapping Name: schema8775 +Schema Mapping Name: schemaName100 + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +7. Tag the jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2 resource. +Tags can help you organize and categorize your Entity Resolution resources. +You can also use them to scope user permissions by granting a user permission +to access or change only resources with certain tag values. +In Entity Resolution, SchemaMapping and MatchingWorkflow can be tagged. For this example, +the SchemaMapping is tagged. + +Successfully tagged the resource. + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +8. View the results of the AWS Entity Resolution Workflow. 
+You cannot view the result of the workflow that is in a running state. +In order to view the results, you need to wait for the workflow that we started in step 3 to complete. + +If you choose not to wait, you cannot view the results. You can perform +this task manually in the AWS Management Console. + +This can take up to 30 mins (y/n). + +y +You selected to view the Entity Resolution Workflow results. +29:01Job status: RUNNING +28:01Job status: RUNNING +27:01Job status: RUNNING +26:01Job status: RUNNING +25:01Job status: RUNNING +24:01Job status: RUNNING +23:01Job status: RUNNING +22:01Job status: RUNNING +21:01Job status: RUNNING +20:01Job status: RUNNING +19:01Job status: RUNNING +18:01Job status: RUNNING +17:01Job status: RUNNING +16:01Job status: RUNNING +15:01Job status: RUNNING +14:01Job status: RUNNING +13:01Job status: RUNNING +12:01Job status: RUNNING +11:01Job status: RUNNING +10:01Job status: RUNNING +09:01Job status: RUNNING +08:01Job status: RUNNING +07:01Job status: SUCCEEDED + +Countdown complete: Workflow is in Completed state! +Job metrics fetched successfully for jobId: f25d2707729646a4af27874d991e22c5 +Number of input records: 7 +Number of match ids: 6 +Number of records not processed: 0 +Number of total records processed: 7 +The following explains the output data generated by the Entity Resolution workflow. The output data is stored in the erbucketf684533d2680435fa99d24b1bdaf5179 bucket. + + ------------------------------------------------------------------------------ ----------------- ---- ------------------ --------------------------- -------------- ---------- --------------------------------------------------- + InputSourceARN ConfidenceLevel id name email phone RecordId MatchID + ------------------------------------------------------------------------------ ----------------- ---- ------------------ --------------------------- -------------- ---------- --------------------------------------------------- + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/csvgluetable 7 Jane E. Doe jane_doe@company.com 111-222-3333 7 036298535ed6471ebfc358fc76e1f51200006472446402560 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/csvgluetable 0.90523 2 Bob Smith Jr. bob.smith@example.com 987-654-3210 2 6ae2d360d6594089837eafc31b20f31600003506806140928 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/jsongluetable 0.90523 2 Bob Smith bob.smith@example.com 2 6ae2d360d6594089837eafc31b20f31600003506806140928 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/csvgluetable 0.89398956 1 Alice B. Johnson alice.johnson@example.com 746-876-9846 1 34a5075b289247efa1847ab292ed677400009137438953472 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/jsongluetable 0.89398956 1 Alice Johnson alice.johnson@example.com 1 34a5075b289247efa1847ab292ed677400009137438953472 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/csvgluetable 0.605295 3 Charlie Black charlie.black@company.com 345-567-1234 3 92c8ef3f68b34948a3af998d700ed02700002146028888064 + arn:aws:glue:region:xxxxxxxxxxxx:table/entity_resolution_db/jsongluetable 0.605295 3 Charlie Black charlie.black@example.com 3 92c8ef3f68b34948a3af998d700ed02700002146028888064 + +Note that each of the last 3 pairs of records are considered a match even though the 'name' or 'email' differ between the records; +For example 'Bob Smith Jr.' compared to 'Bob Smith'. +The confidence level is a value between 0 and 1, where 1 indicates a perfect match. 
In the last pair of matched records, +the confidence level is lower for the differing email addresses. + + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +9. Do you want to delete the resources, including the workflow? (y/n) +You cannot delete the workflow that is in a running state. +In order to delete the workflow, you need to wait for the workflow to complete. + +You can delete the workflow manually in the AWS Management Console at a later time. + +If you already waited for the workflow to complete in the previous step, +the workflow is completed and you can delete it. + +If the workflow is not completed, this can take up to 30 mins (y/n). + +y +workflow-39216b7f-f00b-4896-84ae-cd7edcfc7872 was deleted +Workflow deleted successfully! +Schema mapping 'jsonschema-ef86075e-cf5e-4bb1-be50-e0f19743ddb2' deleted successfully. +Schema mapping 'csv-8d05576d-66bb-4fcf-a29c-8c3de57ce48c' deleted successfully. +Both schema mappings were deleted successfully! + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +Now we delete the CloudFormation stack, which deletes +the resources that were created at the beginning of this scenario. + + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +Delete stack requested .... +Stack deleted successfully. +Resources deleted successfully! + +Enter 'c' followed by to continue: +c +Continuing with the program... + +-------------------------------------------------------------------------------- +-------------------------------------------------------------------------------- +This concludes the AWS Entity Resolution scenario. +-------------------------------------------------------------------------------- + +``` + +## SOS Tags + +The following table describes the metadata used in this Basics Scenario. 
+ +| action | metadata file | metadata key | +|------------------------|--------------------------------|--------------------------------------------| +| `createWorkflow` | entityresolution_metadata.yaml | entityresolution_CreateMatchingWorkflow | +| `createSchemaMapping` | entityresolution_metadata.yaml | entityresolution_CreateSchemaMapping | +| `startMatchingJob` | entityresolution_metadata.yaml | entityresolution_StartMatchingJob | +| `getMatchingJob` | entityresolution_metadata.yaml | entityresolution_GetMatchingJob | +| `listMatchingWorkflows`| entityresolution_metadata.yaml | entityresolution_ListMatchingWorkflows | +| `getSchemaMapping` | entityresolution_metadata.yaml | entityresolution_GetSchemaMapping | +| `listSchemaMappings` | entityresolution_metadata.yaml | entityresolution_ListSchemaMappings | +| `tagResource` | entityresolution_metadata.yaml | entityresolution_TagEntityResource | +| `deleteWorkflow` | entityresolution_metadata.yaml | entityresolution_DeleteMatchingWorkflow | +| `deleteMapping` | entityresolution_metadata.yaml | entityresolution_DeleteSchemaMapping | +| `listMappingJobs` | entityresolution_metadata.yaml | entityresolution_Hello | +| `scenario` | entityresolution_metadata.yaml | entityresolution_Scenario | + + + + diff --git a/scenarios/features/bedrock_converse_tool/README.md b/scenarios/features/bedrock_converse_tool/README.md new file mode 100644 index 00000000000..3e8cb087dcf --- /dev/null +++ b/scenarios/features/bedrock_converse_tool/README.md @@ -0,0 +1,42 @@ +# Bedrock Runtime Converse API with Tool Feature Scenario + +## Overview + +This example shows how to use AWS SDKs and the Amazon Bedrock Converse API to call a custom tool from a large language model (LLM) as part of a multistep conversation. The example creates a weather tool that leverages the Open-Meteo API to retrieve current weather information based on user input. + +For more information, see [Bedrock Converse API with Tool Definition](https://docs.aws.amazon.com/bedrock/latest/userguide/tool-use-inference-call.html). + +This example illustrates a typical interaction between a generative AI model, an application, and connected tools or APIs to solve a problem or achieve a specific goal. The scenario follows these steps: + +1. Set up the system prompt and tool configuration. +2. Create a client to interact with Amazon Bedrock. +3. Prompt the user for their weather request. +4. Send the user input including the conversation history to the model. +5. The model processes the input and determines if a connected tool or API needs to be used. If this is the case, the model returns a tool use request with specific parameters needed to invoke the tool, and a unique tool use ID to correlate tool responses to the request. +6. The scenario application invokes the tool to fetch weather data, and appends the response and tool use ID to the conversation. +7. The model uses the tool response to generate a final response. If additional tool requests are needed, the process is repeated. If the maximum number of recursions is reached, the conversation is ended. +8. Once the final response is received and printed, the application returns to the prompt. + +![img.png](toolscenario.png) + +### Resources + +No additional resources are needed for this scenario.
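The steps above describe the tool configuration in prose, and the specification that follows defines the Weather_Tool schema as JSON. As a rough illustration only, here is how that tool specification and a single Converse call might look with the AWS SDK for Java 2.x; the model ID and user prompt are placeholders, and the builder and union type names are assumptions based on the SDK's Converse model, so verify them against the SDK (and the .NET implementation) before relying on them.

```java
import java.util.List;
import java.util.Map;

import software.amazon.awssdk.core.document.Document;
import software.amazon.awssdk.services.bedrockruntime.BedrockRuntimeClient;
import software.amazon.awssdk.services.bedrockruntime.model.ContentBlock;
import software.amazon.awssdk.services.bedrockruntime.model.ConversationRole;
import software.amazon.awssdk.services.bedrockruntime.model.ConverseRequest;
import software.amazon.awssdk.services.bedrockruntime.model.ConverseResponse;
import software.amazon.awssdk.services.bedrockruntime.model.Message;
import software.amazon.awssdk.services.bedrockruntime.model.StopReason;
import software.amazon.awssdk.services.bedrockruntime.model.Tool;
import software.amazon.awssdk.services.bedrockruntime.model.ToolConfiguration;
import software.amazon.awssdk.services.bedrockruntime.model.ToolInputSchema;
import software.amazon.awssdk.services.bedrockruntime.model.ToolSpecification;

public class WeatherToolSketch {
    public static void main(String[] args) {
        // The Weather_Tool input schema from the specification, expressed as an SDK Document.
        Document schema = Document.fromMap(Map.of(
                "type", Document.fromString("object"),
                "properties", Document.fromMap(Map.of(
                        "latitude", Document.fromMap(Map.of(
                                "type", Document.fromString("string"),
                                "description", Document.fromString("Geographical WGS84 latitude of the location."))),
                        "longitude", Document.fromMap(Map.of(
                                "type", Document.fromString("string"),
                                "description", Document.fromString("Geographical WGS84 longitude of the location."))))),
                "required", Document.fromList(List.of(
                        Document.fromString("latitude"), Document.fromString("longitude")))));

        ToolConfiguration toolConfig = ToolConfiguration.builder()
                .tools(Tool.fromToolSpec(ToolSpecification.builder()
                        .name("Weather_Tool")
                        .description("Get the current weather for a given location, based on its WGS84 coordinates.")
                        .inputSchema(ToolInputSchema.fromJson(schema))
                        .build()))
                .build();

        try (BedrockRuntimeClient client = BedrockRuntimeClient.create()) {
            ConverseResponse response = client.converse(ConverseRequest.builder()
                    .modelId("amazon.nova-lite-v1:0") // placeholder model ID
                    .messages(Message.builder()
                            .role(ConversationRole.USER)
                            .content(ContentBlock.fromText("What's the weather like in Oklahoma City?"))
                            .build())
                    .toolConfig(toolConfig)
                    .build());

            // When the model wants the tool, the stop reason is TOOL_USE and the returned
            // message contains a toolUse block with the coordinates and a tool use ID that
            // the application echoes back with the tool result.
            if (response.stopReason() == StopReason.TOOL_USE) {
                System.out.println("Model requested the Weather_Tool.");
            }
        }
    }
}
```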
+ +## Implementations + +This example is implemented in the following languages: + +- [.NET](../../../dotnetv3/Bedrock-runtime/Scenarios/ConverseToolScenario/README.md) + +## Additional resources + +- [Documentation: The Amazon Bedrock User Guide](https://docs.aws.amazon.com/bedrock/latest/userguide/what-is-bedrock.html) +- [Tutorials: A developer's guide to Bedrock's new Converse API](https://community.aws/content/2dtauBCeDa703x7fDS9Q30MJoBA/amazon-bedrock-converse-api-developer-guide) +- [More examples: Amazon Bedrock code examples and scenarios in multiple programming languages](https://docs.aws.amazon.com/bedrock/latest/userguide/service_code_examples.html) + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. SPDX-License-Identifier: Apache-2.0 diff --git a/scenarios/features/bedrock_converse_tool/SPECIFICATION.md b/scenarios/features/bedrock_converse_tool/SPECIFICATION.md new file mode 100644 index 00000000000..474980999a9 --- /dev/null +++ b/scenarios/features/bedrock_converse_tool/SPECIFICATION.md @@ -0,0 +1,246 @@ +# Bedrock Runtime Converse API with Tool Feature Scenario - Technical specification + +This document contains the technical specifications for _Bedrock Runtime Converse API with Tool Feature Scenario_, a feature scenario that showcases AWS services and SDKs. It is primarily intended for the AWS code examples team to use while developing this example in additional languages. + +This document explains the following: + +- Architecture and features of the example scenario. +- Metadata information for the scenario. +- Sample reference output. + +For an introduction, see the [README.md](README.md). + +--- + +### Table of contents + +- [User Input](#user-input) +- [Example Output](#example-output) +- [Errors](#errors) +- [Metadata](#metadata) + +## User Input + +The user's input is used as the starting point for the Bedrock Runtime conversation, and each response is added to an array of messages. +The model should respond when it needs to invoke the tool, and the application should run the tool and append the response to the conversation. +This process can be repeated as needed until a maximum number of recursions (5) is reached. See the .NET implementation for an example of how the messages are processed. Following is an example of how the conversation could go: + +1. Greet the user and provide an overview of the application. +1. Handle the user's weather information request: + 1. The user requests weather information. This request is sent to the Bedrock model. + 2. The model response includes a tool request, with a latitude and longitude to provide to the tool. + 3. The application then uses the Weather_Tool to retrieve the current weather data for those coordinates, and appends that response as a tool response to the conversation. The conversation is sent back to the model. + 4. The model responds with either a final response, or a request for more information. The process repeats. + 5. The application prints the final response. +1. Any off-topic requests should be handled according to the system prompt. This prompt is provided below. +1. The user can type 'x' to exit the application. + +#### System prompt +``` +You are a weather assistant that provides current weather data for user-specified locations using only +the Weather_Tool, which expects latitude and longitude. Infer the coordinates from the location yourself. +If the user provides coordinates, infer the approximate location and refer to it in your response.
+To use the tool, you strictly apply the provided tool specification. + +- Explain your step-by-step process, and give brief updates before each step. +- Only use the Weather_Tool for data. Never guess or make up information. +- Repeat the tool use for subsequent requests if necessary. +- If the tool errors, apologize, explain weather is unavailable, and suggest other options. +- Report temperatures in °C (°F) and wind in km/h (mph). Keep weather reports concise. Sparingly use + emojis where appropriate. +- Only respond to weather queries. Remind off-topic users of your purpose. +- Never claim to search online, access external data, or use tools besides Weather_Tool. +- Complete the entire process until you have all required data before sending the complete response. +``` + +#### Weather tool specification +For strongly typed languages, you will need to use the Bedrock classes provided for tool specification. + +``` +"toolSpec": { + "name": "Weather_Tool", + "description": "Get the current weather for a given location, based on its WGS84 coordinates.", + "inputSchema": { + "json": { + "type": "object", + "properties": { + "latitude": { + "type": "string", + "description": "Geographical WGS84 latitude of the location.", + }, + "longitude": { + "type": "string", + "description": "Geographical WGS84 longitude of the location.", + }, + }, + "required": ["latitude", "longitude"], + } + }, + } +``` + + +## Example Output +``` +******************************************************************************** + Welcome to the Amazon Bedrock Tool Use demo! +******************************************************************************** + + This assistant provides current weather information for user-specified locations. + You can ask for weather details by providing the location name or coordinates. + + Example queries: + - What's the weather like in New York? + - Current weather for latitude 40.70, longitude -74.01 + - Is it warmer in Rome or Barcelona today? + + To exit the program, simply type 'x' and press Enter. + + P.S.: You're not limited to single locations, or even to using English! + Have fun and experiment with the app! + +******************************************************************************** + Your weather info request: (x to exit): + +>What's the weather like in Oklahoma City? + Calling Bedrock... + The model's response: + +Okay, let me get the current weather information for Oklahoma City: + +1) I will look up the latitude and longitude coordinates for Oklahoma City. +2) Then I will use the Weather_Tool to get the weather data for those coordinates. + + + Invoking tool: Weather_Tool with input: 35.4676, -97.5164... + + Calling Bedrock... + The model's response: + +According to the weather data, the current conditions in Oklahoma City are: + +??? Partly cloudy +Temperature: 2.7°C (36.9°F) +Wind: 22.3 km/h (13.9 mph) from the North + +The wind is breezy and it's a bit cool for this time of year in Oklahoma City. I'd recommend wearing a jacket if going outside for extended periods. + +******************************************************************************** + Your weather info request: (x to exit): + +>What's the best kind of cat? + Calling Bedrock... + The model's response: + +I'm an AI assistant focused on providing current weather information using the available Weather_Tool. I don't have any data or capabilities related to discussing different types of cats. Perhaps we could return to discussing weather conditions for a particular location? 
I'd be happy to look up the latest forecast if you provide a city or geographic coordinates. + +******************************************************************************** + Your weather info request: (x to exit): + +>Where is the warmest city in Oklahoma right now? + Calling Bedrock... + The model's response: + +Okay, let me see if I can find the warmest city in Oklahoma right now using the Weather_Tool: + +1) I will look up the coordinates for some major cities in Oklahoma. +2) Then I will use the Weather_Tool to get the current temperature for each city. +3) I will compare the temperatures to determine the warmest city. + + + Invoking tool: Weather_Tool with input: 35.4676, -97.5164... + + Calling Bedrock... + The model's response: + +Oklahoma City: 2.7°C + + + Invoking tool: Weather_Tool with input: 36.1539, -95.9925... + + Calling Bedrock... + The model's response: + +Tulsa: 5.5°C + +Based on the data from the Weather_Tool, the warmest major city in Oklahoma right now is Tulsa at 5.5°C (41.9°F). + +******************************************************************************** + Your weather info request: (x to exit): + +>What's the warmest city in California right now? + Calling Bedrock... + The model's response: + +OK, let me check the current temperatures in some major cities in California to find the warmest one: + + + Invoking tool: Weather_Tool with input: 34.0522, -118.2437... + + Calling Bedrock... + The model's response: + +Los Angeles: 10.6°C (51.1°F) + + + Invoking tool: Weather_Tool with input: 37.7749, -122.4194... + + Calling Bedrock... + The model's response: + + + +San Francisco: 11.6°C (52.9°F) + + + Invoking tool: Weather_Tool with input: 32.7157, -117.1611... + + Calling Bedrock... + Warning: Maximum number of recursions reached. Please try again. + The model's response: + +San Diego: 12.9°C (55.2°F) + +Based on the data from the Weather_Tool, the warmest major city in California right now appears to be San Diego at 12.9°C (55.2°F). + +******************************************************************************** + Your weather info request: (x to exit): +>x +******************************************************************************** + Thank you for checking out the Amazon Bedrock Tool Use demo. We hope you + learned something new, or got some inspiration for your own apps today! + + For more Bedrock examples in different programming languages, have a look at: + https://docs.aws.amazon.com/bedrock/latest/userguide/service_code_examples.html +******************************************************************************** + +Amazon Bedrock Converse API with Tool Use Feature Scenario is complete. +-------------------------------------------------------------------------------- + +``` +- Cleanup + - There are no resources needing cleanup in this scenario. + +--- + +## Errors +In addition to handling Bedrock Runtime errors on the Converse action, the scenario should also +handle errors related to the tool itself, such as an HTTP Request failure. + +| action | Error | Handling | +|----------------|------------------------|------------------------------------------------------| +| `Converse` | ModelNotReady | Notify the user to try again, and stop the scenario. | +| `HTTP Request` | HttpRequestException | Notify the user and stop the scenario. | + +--- + +## Metadata +For languages which already have an entry for the action, add a description for the snippet describing the scenario or action. 
+ +| action / scenario | metadata file | metadata key | +|--------------------------------------------|--------------------------------|------------------------------------------------------| +| `Converse` | bedrock-runtime_metadata.yaml | bedrock-runtime_Converse_AmazonNovaText | +| `Tool use with the Converse API` | bedrock-runtime_metadata.yaml | bedrock-runtime_Scenario_ToolUse | +| `Scenario: Tool use with the Converse API` | bedrock-runtime_metadata.yaml | bedrock-runtime_Scenario_ToolUseDemo_AmazonNova | + + diff --git a/scenarios/features/bedrock_converse_tool/toolscenario.png b/scenarios/features/bedrock_converse_tool/toolscenario.png new file mode 100644 index 00000000000..45defe47143 Binary files /dev/null and b/scenarios/features/bedrock_converse_tool/toolscenario.png differ diff --git a/swift/example_code/s3/checksums/multipart/Package.swift b/swift/example_code/s3/checksums/multipart/Package.swift new file mode 100644 index 00000000000..cbe56d024a5 --- /dev/null +++ b/swift/example_code/s3/checksums/multipart/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "mpchecksums", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "mpchecksums", + dependencies: [ + .product(name: "AWSS3", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser"), + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/s3/checksums/multipart/Sources/TransferError.swift b/swift/example_code/s3/checksums/multipart/Sources/TransferError.swift new file mode 100644 index 00000000000..9b413dc1511 --- /dev/null +++ b/swift/example_code/s3/checksums/multipart/Sources/TransferError.swift @@ -0,0 +1,31 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/// Errors thrown by the example's functions. +enum TransferError: Error { + /// The checksum is missing or erroneous. + case checksumError + /// An error occurred when completing a multi-part upload to Amazon S3. + case multipartFinishError(_ message: String = "") + /// An error occurred when starting a multi-part upload to Amazon S3. + case multipartStartError + /// An error occurred while uploading a file to Amazon S3. + case uploadError(_ message: String = "") + /// An error occurred while reading the file's contents. + case readError + + var errorDescription: String? { + switch self { + case .checksumError: + return "The checksum is missing or incorrect" + case .multipartFinishError(message: let message): + return "An error occurred when completing a multi-part upload to Amazon S3. \(message)" + case .multipartStartError: + return "An error occurred when starting a multi-part upload to Amazon S3." 
+ case .uploadError(message: let message): + return "An error occurred attempting to upload the file: \(message)" + case .readError: + return "An error occurred while reading the file data" + } + } +} diff --git a/swift/example_code/s3/checksums/multipart/Sources/entry.swift b/swift/example_code/s3/checksums/multipart/Sources/entry.swift new file mode 100644 index 00000000000..afa18749230 --- /dev/null +++ b/swift/example_code/s3/checksums/multipart/Sources/entry.swift @@ -0,0 +1,243 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +/// An example demonstrating how to perform multi-part uploads to Amazon S3 +/// using the AWS SDK for Swift. + +// snippet-start:[swift.s3.mp-checksums.imports] +import ArgumentParser +import AWSClientRuntime +import AWSS3 +import Foundation +import Smithy +// snippet-end:[swift.s3.mp-checksums.imports] + +// -MARK: - Async command line tool + +struct ExampleCommand: ParsableCommand { + // -MARK: Command arguments + @Option(help: "Path of local file to upload to Amazon S3") + var file: String + @Option(help: "Name of the Amazon S3 bucket to upload to") + var bucket: String + @Option(help: "Key name to give the file on Amazon S3") + var key: String? + @Option(help: "Name of the Amazon S3 Region to use") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "mpchecksums", + abstract: """ + This example shows how to use checksums with multi-part uploads. + """, + discussion: """ + """ + ) + + // -MARK: - File uploading + + // snippet-start:[swift.s3.mp-checksums.uploadfile] + /// Upload a file to Amazon S3. + /// + /// - Parameters: + /// - file: The path of the local file to upload to Amazon S3. + /// - bucket: The name of the bucket to upload the file into. + /// - key: The key (name) to give the object on Amazon S3. + /// + /// - Throws: Errors from `TransferError` + func uploadFile(file: String, bucket: String, key: String?) async throws { + let fileURL = URL(fileURLWithPath: file) + let fileName: String + + // If no key was provided, use the last component of the filename. + + if key == nil { + fileName = fileURL.lastPathComponent + } else { + fileName = key! + } + + // Create an Amazon S3 client in the desired Region. + + let config = try await S3Client.S3ClientConfiguration(region: region) + let s3Client = S3Client(config: config) + + print("Uploading file from \(fileURL.path) to \(bucket)/\(fileName).") + + let multiPartUploadOutput: CreateMultipartUploadOutput + + // First, create the multi-part upload, using SHA256 checksums. + + do { + multiPartUploadOutput = try await s3Client.createMultipartUpload( + input: CreateMultipartUploadInput( + bucket: bucket, + checksumAlgorithm: .sha256, + key: key + ) + ) + } catch { + throw TransferError.multipartStartError + } + + // Get the upload ID. This needs to be included with each part sent. + + guard let uploadID = multiPartUploadOutput.uploadId else { + throw TransferError.uploadError("Unable to get the upload ID") + } + + // Open a file handle and prepare to send the file in chunks. Each chunk + // is 5 MB, which is the minimum size allowed by Amazon S3. + + do { + let blockSize = Int(5 * 1024 * 1024) + let fileHandle = try FileHandle(forReadingFrom: fileURL) + let fileSize = try getFileSize(file: fileHandle) + let blockCount = Int(ceil(Double(fileSize) / Double(blockSize))) + var completedParts: [S3ClientTypes.CompletedPart] = [] + + // Upload the blocks one at as Amazon S3 object parts. 
+ + print("Uploading...") + + for partNumber in 1...blockCount { + let data: Data + let startIndex = UInt64(partNumber - 1) * UInt64(blockSize) + + // Read the block from the file. + + data = try readFileBlock(file: fileHandle, startIndex: startIndex, size: blockSize) + + let uploadPartInput = UploadPartInput( + body: ByteStream.data(data), + bucket: bucket, + checksumAlgorithm: .sha256, + key: key, + partNumber: partNumber, + uploadId: uploadID + ) + + // Upload the part with a SHA256 checksum. + + do { + let uploadPartOutput = try await s3Client.uploadPart(input: uploadPartInput) + + guard let eTag = uploadPartOutput.eTag else { + throw TransferError.uploadError("Missing eTag") + } + guard let checksum = uploadPartOutput.checksumSHA256 else { + throw TransferError.checksumError + } + print("Part \(partNumber) checksum: \(checksum)") + + // Append the completed part description (including its + // checksum, ETag, and part number) to the + // `completedParts` array. + + completedParts.append( + S3ClientTypes.CompletedPart( + checksumSHA256: checksum, + eTag: eTag, + partNumber: partNumber + ) + ) + } catch { + throw TransferError.uploadError(error.localizedDescription) + } + } + + // Tell Amazon S3 that all parts have been uploaded. + + do { + let partInfo = S3ClientTypes.CompletedMultipartUpload(parts: completedParts) + let multiPartCompleteInput = CompleteMultipartUploadInput( + bucket: bucket, + key: key, + multipartUpload: partInfo, + uploadId: uploadID + ) + _ = try await s3Client.completeMultipartUpload(input: multiPartCompleteInput) + } catch { + throw TransferError.multipartFinishError(error.localizedDescription) + } + } catch { + throw TransferError.uploadError("Error uploading the file: \(error)") + } + + print("Done. Uploaded as \(fileName) in bucket \(bucket).") + } + // snippet-end:[swift.s3.mp-checksums.uploadfile] + + // -MARK: - File access + + /// Get the size of a file in bytes. + /// + /// - Parameter file: `FileHandle` identifying the file to return the size of. + /// + /// - Returns: The number of bytes in the file. + func getFileSize(file: FileHandle) throws -> UInt64 { + let fileSize: UInt64 + + // Get the total size of the file in bytes, then compute the number + // of blocks it will take to transfer the whole file. + + do { + try file.seekToEnd() + fileSize = try file.offset() + } catch { + throw TransferError.readError + } + return fileSize + } + + /// Read the specified range of bytes from a file and return them in a + /// new `Data` object. + /// + /// - Parameters: + /// - file: The `FileHandle` to read from. + /// - startIndex: The index of the first byte to read. + /// - size: The number of bytes to read. + /// + /// - Returns: A new `Data` object containing the specified range of bytes. + /// + /// - Throws: `TransferError.readError` if the read fails. + func readFileBlock(file: FileHandle, startIndex: UInt64, size: Int) throws -> Data { + file.seek(toFileOffset: startIndex) + do { + let data = try file.read(upToCount: size) + guard let data else { + throw TransferError.readError + } + return data + } catch { + throw TransferError.readError + } + } + + // -MARK: - Asynchronous main code + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + try await uploadFile(file: file, bucket: bucket, + key: key) + } +} + +// -MARK: - Entry point + +/// The program's asynchronous entry point. 
+@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch let error as TransferError { + print("ERROR: \(error.errorDescription ?? "Unknown error")") + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/s3/checksums/upload/Package.swift b/swift/example_code/s3/checksums/upload/Package.swift new file mode 100644 index 00000000000..4fff4a7e98d --- /dev/null +++ b/swift/example_code/s3/checksums/upload/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "checksums", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "checksums", + dependencies: [ + .product(name: "AWSS3", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/s3/checksums/upload/Sources/TransferError.swift b/swift/example_code/s3/checksums/upload/Sources/TransferError.swift new file mode 100644 index 00000000000..f05b3640a39 --- /dev/null +++ b/swift/example_code/s3/checksums/upload/Sources/TransferError.swift @@ -0,0 +1,47 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 + +/// Errors thrown by the example's functions. +enum TransferError: Error { + /// The destination directory for a download is missing or inaccessible. + case directoryError + /// An error occurred while downloading a file from Amazon S3. + case downloadError(_ message: String = "") + /// An error occurred moving the file to its final destination. + case fileMoveError + /// An error occurred when completing a multi-part upload to Amazon S3. + case multipartFinishError + /// An error occurred when starting a multi-part upload to Amazon S3. + case multipartStartError + /// An error occurred while uploading a file to Amazon S3. + case uploadError(_ message: String = "") + /// An error occurred while reading the file's contents. + case readError + /// An error occurred while presigning the URL. + case signingError + /// An error occurred while writing the file's contents. + case writeError + + var errorDescription: String? { + switch self { + case .directoryError: + return "The destination directory could not be located or created" + case .downloadError(message: let message): + return "An error occurred attempting to download the file: \(message)" + case .fileMoveError: + return "The file couldn't be moved to the destination directory" + case .multipartFinishError: + return "An error occurred when completing a multi-part upload to Amazon S3." 
+ case .multipartStartError: + return "An error occurred when starting a multi-part upload to Amazon S3." + case .uploadError(message: let message): + return "An error occurred attempting to upload the file: \(message)" + case .readError: + return "An error occurred while reading the file data" + case .signingError: + return "An error occurred while pre-signing the URL" + case .writeError: + return "An error occurred while writing the file data" + } + } +} diff --git a/swift/example_code/s3/checksums/upload/Sources/entry.swift b/swift/example_code/s3/checksums/upload/Sources/entry.swift new file mode 100644 index 00000000000..3442ff8ec1e --- /dev/null +++ b/swift/example_code/s3/checksums/upload/Sources/entry.swift @@ -0,0 +1,107 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +/// An example demonstrating how to configure checksums when uploading to +/// Amazon S3. + +// snippet-start:[swift.s3.checksums-upload.imports] +import ArgumentParser +import AWSClientRuntime +import AWSS3 +import Foundation +import Smithy +// snippet-end:[swift.s3.checksums-upload.imports] + +// -MARK: - Async command line tool + +struct ExampleCommand: ParsableCommand { + // -MARK: Command arguments + @Option(help: "Path of local file to upload to Amazon S3") + var source: String + @Option(help: "Name of the Amazon S3 bucket to upload to") + var bucket: String + @Option(help: "Destination file path within the bucket") + var dest: String? + @Option(help: "Name of the Amazon S3 Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "checksums", + abstract: """ + This example shows how to configure checksums when uploading to Amazon S3. + """, + discussion: """ + """ + ) + + // -MARK: - File upload + + func uploadFile(sourcePath: String, bucket: String, key: String?) async throws { + let fileURL = URL(fileURLWithPath: sourcePath) + let fileName: String + + // If no key was provided, use the last component of the filename. + + if key == nil { + fileName = fileURL.lastPathComponent + } else { + fileName = key! + } + + // Create an Amazon S3 client in the desired Region. + + let config = try await S3Client.S3ClientConfiguration(region: region) + let s3Client = S3Client(config: config) + + print("Uploading file from \(fileURL.path) to \(bucket)/\(fileName).") + + let fileData = try Data(contentsOf: fileURL) + let dataStream = ByteStream.data(fileData) + + // Use PutObject to send the file to Amazon S3. The checksum is + // specified by setting the `checksumAlgorithm` property. In this + // example, SHA256 is used. + + do { + // snippet-start:[swift.s3.checksums.upload-file] + _ = try await s3Client.putObject( + input: PutObjectInput( + body: dataStream, + bucket: bucket, + checksumAlgorithm: .sha256, + key: fileName + ) + ) + // snippet-end:[swift.s3.checksums.upload-file] + } catch { + throw TransferError.uploadError("Error uploading file: \(error.localizedDescription)") + } + print("Uploaded \(sourcePath) to \(bucket)/\(fileName).") + } + + // -MARK: - Asynchronous main code + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + try await uploadFile(sourcePath: source, bucket: bucket, key: dest) + } +} + +// -MARK: - Entry point + +/// The program's asynchronous entry point. 
+@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch let error as TransferError { + print("ERROR: \(error.errorDescription ?? "Unknown error")") + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/s3/multipart-upload/Sources/TransferError.swift b/swift/example_code/s3/multipart-upload/Sources/TransferError.swift index afaa2101372..9b413dc1511 100644 --- a/swift/example_code/s3/multipart-upload/Sources/TransferError.swift +++ b/swift/example_code/s3/multipart-upload/Sources/TransferError.swift @@ -3,6 +3,8 @@ /// Errors thrown by the example's functions. enum TransferError: Error { + /// The checksum is missing or erroneous. + case checksumError /// An error occurred when completing a multi-part upload to Amazon S3. case multipartFinishError(_ message: String = "") /// An error occurred when starting a multi-part upload to Amazon S3. @@ -14,6 +16,8 @@ enum TransferError: Error { var errorDescription: String? { switch self { + case .checksumError: + return "The checksum is missing or incorrect" case .multipartFinishError(message: let message): return "An error occurred when completing a multi-part upload to Amazon S3. \(message)" case .multipartStartError: diff --git a/swift/example_code/s3/multipart-upload/Sources/entry.swift b/swift/example_code/s3/multipart-upload/Sources/entry.swift index 8054e26c5e9..e1854157147 100644 --- a/swift/example_code/s3/multipart-upload/Sources/entry.swift +++ b/swift/example_code/s3/multipart-upload/Sources/entry.swift @@ -6,7 +6,6 @@ // snippet-start:[swift.s3.multipart-upload.imports] import ArgumentParser -import AsyncHTTPClient import AWSClientRuntime import AWSS3 import Foundation @@ -38,6 +37,14 @@ struct ExampleCommand: ParsableCommand { // -MARK: - File uploading + /// Upload a file to Amazon S3. + /// + /// - Parameters: + /// - file: The path of the local file to upload to Amazon S3. + /// - bucket: The name of the bucket to upload the file into. + /// - key: The key (name) to give the object on Amazon S3. + /// + /// - Throws: Errors from `TransferError` func uploadFile(file: String, bucket: String, key: String?) async throws { let fileURL = URL(fileURLWithPath: file) let fileName: String @@ -175,13 +182,18 @@ struct ExampleCommand: ParsableCommand { uploadId: uploadID ) + // Upload the part. do { - let uploadPartOutput = try await client.uploadPart(input: uploadPartInput) + let uploadPartOutput = try await client.uploadPart(input: uploadPartInput) + guard let eTag = uploadPartOutput.eTag else { throw TransferError.uploadError("Missing eTag") } - return S3ClientTypes.CompletedPart(eTag: eTag, partNumber: partNumber) + return S3ClientTypes.CompletedPart( + eTag: eTag, + partNumber: partNumber + ) } catch { throw TransferError.uploadError(error.localizedDescription) } diff --git a/swift/example_code/s3/presigned-urls/Sources/presigned-upload/TransferError.swift b/swift/example_code/s3/presigned-urls/Sources/presigned-upload/TransferError.swift index f05b3640a39..f9bd8d2749d 100644 --- a/swift/example_code/s3/presigned-urls/Sources/presigned-upload/TransferError.swift +++ b/swift/example_code/s3/presigned-urls/Sources/presigned-upload/TransferError.swift @@ -3,45 +3,17 @@ /// Errors thrown by the example's functions. enum TransferError: Error { - /// The destination directory for a download is missing or inaccessible. 
- case directoryError - /// An error occurred while downloading a file from Amazon S3. - case downloadError(_ message: String = "") - /// An error occurred moving the file to its final destination. - case fileMoveError - /// An error occurred when completing a multi-part upload to Amazon S3. - case multipartFinishError - /// An error occurred when starting a multi-part upload to Amazon S3. - case multipartStartError /// An error occurred while uploading a file to Amazon S3. case uploadError(_ message: String = "") /// An error occurred while reading the file's contents. case readError - /// An error occurred while presigning the URL. - case signingError - /// An error occurred while writing the file's contents. - case writeError var errorDescription: String? { switch self { - case .directoryError: - return "The destination directory could not be located or created" - case .downloadError(message: let message): - return "An error occurred attempting to download the file: \(message)" - case .fileMoveError: - return "The file couldn't be moved to the destination directory" - case .multipartFinishError: - return "An error occurred when completing a multi-part upload to Amazon S3." - case .multipartStartError: - return "An error occurred when starting a multi-part upload to Amazon S3." case .uploadError(message: let message): return "An error occurred attempting to upload the file: \(message)" case .readError: return "An error occurred while reading the file data" - case .signingError: - return "An error occurred while pre-signing the URL" - case .writeError: - return "An error occurred while writing the file data" } } } diff --git a/swift/example_code/sns/CreateTopic/Package.swift b/swift/example_code/sns/CreateTopic/Package.swift new file mode 100644 index 00000000000..780a718433e --- /dev/null +++ b/swift/example_code/sns/CreateTopic/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "createtopic", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "createtopic", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sns/CreateTopic/Sources/entry.swift b/swift/example_code/sns/CreateTopic/Sources/entry.swift new file mode 100644 index 00000000000..636acc2227c --- /dev/null +++ b/swift/example_code/sns/CreateTopic/Sources/entry.swift @@ -0,0 +1,60 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to set up and use an Amazon Simple Notification +// Service (SNS) client to create a new topic. 
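
For readers skimming the diff, here is a minimal sketch of the `CreateTopic` call that the file below wraps in a command-line tool. The wrapper function and its region default are illustrative assumptions, not part of the example itself.

```
import AWSSNS

/// Create an Amazon SNS topic and return its ARN.
/// (Hypothetical helper; the region default is an assumption.)
func createTopic(named name: String, region: String = "us-east-1") async throws -> String? {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    let output = try await snsClient.createTopic(
        input: CreateTopicInput(name: name)
    )
    return output.topicArn
}
```
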
+ +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "Name to give the new Amazon SNS topic") + var name: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "createtopic", + abstract: """ + This example shows how to create an Amazon SNS topic. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.CreateTopic] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + let output = try await snsClient.createTopic( + input: CreateTopicInput(name: name) + ) + + guard let arn = output.topicArn else { + print("No topic ARN returned by Amazon SNS.") + return + } + // snippet-end:[swift.sns.CreateTopic] + + print("New topic created with ARN: \(arn)") + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sns/DeleteTopic/Package.swift b/swift/example_code/sns/DeleteTopic/Package.swift new file mode 100644 index 00000000000..2ce4a387ecd --- /dev/null +++ b/swift/example_code/sns/DeleteTopic/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "deletetopic", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "deletetopic", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sns/DeleteTopic/Sources/entry.swift b/swift/example_code/sns/DeleteTopic/Sources/entry.swift new file mode 100644 index 00000000000..bd8ba43e54f --- /dev/null +++ b/swift/example_code/sns/DeleteTopic/Sources/entry.swift @@ -0,0 +1,55 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to set up and use an Amazon Simple Notification +// Service (SNS) client to delete a topic. 
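
As above, a minimal sketch of the `DeleteTopic` call used by the example that follows; the helper function shape is assumed for illustration only.

```
import AWSSNS

/// Delete the Amazon SNS topic with the given ARN.
/// (Hypothetical helper; the region default is an assumption.)
func deleteTopic(arn: String, region: String = "us-east-1") async throws {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    _ = try await snsClient.deleteTopic(
        input: DeleteTopicInput(topicArn: arn)
    )
}
```
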
+ +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The ARN of the Amazon SNS topic to delete") + var arn: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "deletetopic", + abstract: """ + This example shows how to delete an Amazon SNS topic. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.DeleteTopic] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + _ = try await snsClient.deleteTopic( + input: DeleteTopicInput(topicArn: arn) + ) + // snippet-end:[swift.sns.DeleteTopic] + + print("Topic deleted.") + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sns/Publish/Package.swift b/swift/example_code/sns/Publish/Package.swift new file mode 100644 index 00000000000..b338d31b867 --- /dev/null +++ b/swift/example_code/sns/Publish/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "publish", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "publish", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sns/Publish/Sources/entry.swift b/swift/example_code/sns/Publish/Sources/entry.swift new file mode 100644 index 00000000000..cfc582cbb44 --- /dev/null +++ b/swift/example_code/sns/Publish/Sources/entry.swift @@ -0,0 +1,64 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to publish a message to an Amazon SNS topic. 
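
The core of the publishing example below is a single `Publish` call; a minimal sketch, assuming a hypothetical wrapper function, might look like this.

```
import AWSSNS

/// Publish a message to a topic and return the new message's ID.
/// (Hypothetical helper; the region default is an assumption.)
func publish(message: String, toTopic arn: String,
             region: String = "us-east-1") async throws -> String? {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    let output = try await snsClient.publish(
        input: PublishInput(message: message, topicArn: arn)
    )
    return output.messageId
}
```
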
+ +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The ARN of the Amazon SNS topic to publish to") + var arn: String + @Argument(help: "The message to publish to the topic") + var message: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "publish", + abstract: """ + This example shows how to publish a message to an Amazon SNS topic. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.Publish] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + let output = try await snsClient.publish( + input: PublishInput( + message: message, + topicArn: arn + ) + ) + + guard let messageId = output.messageId else { + print("No message ID received from Amazon SNS.") + return + } + + print("Published message with ID \(messageId)") + // snippet-end:[swift.sns.Publish] + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sns/README.md b/swift/example_code/sns/README.md new file mode 100644 index 00000000000..ba494a4ca23 --- /dev/null +++ b/swift/example_code/sns/README.md @@ -0,0 +1,103 @@ +# Amazon SNS code examples for the SDK for Swift + +## Overview + +Shows how to use the AWS SDK for Swift to work with Amazon Simple Notification Service (Amazon SNS). + + + + +_Amazon SNS is a web service that enables applications, end-users, and devices to instantly send and receive notifications from the cloud._ + +## ⚠ Important + +* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/). +* Running the tests might result in charges to your AWS account. +* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege). +* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services). + + + + +## Code examples + +### Prerequisites + +For prerequisites, see the [README](../../README.md#Prerequisites) in the `swift` folder. + + + + + +### Get started + +- [Hello Amazon SNS](basics/Package.swift#L8) (`ListTopics`) + + +### Single actions + +Code excerpts that show you how to call individual service functions. 
+ +- [CreateTopic](CreateTopic/Sources/entry.swift#L29) +- [DeleteTopic](DeleteTopic/Sources/entry.swift#L29) +- [ListTopics](basics/Sources/entry.swift#L28) +- [Publish](Publish/Sources/entry.swift#L30) +- [Subscribe](SubscribeEmail/Sources/entry.swift#L31) +- [Unsubscribe](Unsubscribe/Sources/entry.swift#L29) + + + + + +## Run the examples + +### Instructions + +To build any of these examples from a terminal window, navigate into its +directory, then use the following command: + +``` +$ swift build +``` + +To build one of these examples in Xcode, navigate to the example's directory +(such as the `ListUsers` directory, to build that example). Then type `xed.` +to open the example directory in Xcode. You can then use standard Xcode build +and run commands. + + + + +#### Hello Amazon SNS + +This example shows you how to get started using Amazon SNS. + + + +### Tests + +⚠ Running tests might result in charges to your AWS account. + + +To find instructions for running these tests, see the [README](../../README.md#Tests) +in the `swift` folder. + + + + + + +## Additional resources + +- [Amazon SNS Developer Guide](https://docs.aws.amazon.com/sns/latest/dg/welcome.html) +- [Amazon SNS API Reference](https://docs.aws.amazon.com/sns/latest/api/welcome.html) +- [SDK for Swift Amazon SNS reference](https://sdk.amazonaws.com/swift/api/awssns/latest/documentation/awssns) + + + + +--- + +Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. + +SPDX-License-Identifier: Apache-2.0 diff --git a/swift/example_code/sns/SubscribeEmail/Package.swift b/swift/example_code/sns/SubscribeEmail/Package.swift new file mode 100644 index 00000000000..025dd7f225c --- /dev/null +++ b/swift/example_code/sns/SubscribeEmail/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "subscribe-email", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "subscribe-email", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sns/SubscribeEmail/Sources/entry.swift b/swift/example_code/sns/SubscribeEmail/Sources/entry.swift new file mode 100644 index 00000000000..298819f629c --- /dev/null +++ b/swift/example_code/sns/SubscribeEmail/Sources/entry.swift @@ -0,0 +1,67 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to subscribe an email address to an Amazon Simple +// Notification Service (SNS) topic. 
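
Before the full command-line version of this example, here is a minimal sketch of the `Subscribe` call for an email endpoint. The helper function is illustrative; note that `returnSubscriptionArn: true` asks SNS to return the full subscription ARN rather than a "pending confirmation" placeholder.

```
import AWSSNS

/// Subscribe an email address to a topic and return the subscription ARN.
/// (Hypothetical helper; the region default is an assumption.)
func subscribeEmail(_ email: String, toTopic arn: String,
                    region: String = "us-east-1") async throws -> String? {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    let output = try await snsClient.subscribe(
        input: SubscribeInput(
            endpoint: email,
            protocol: "email",
            returnSubscriptionArn: true,
            topicArn: arn
        )
    )
    return output.subscriptionArn
}
```
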
+
+import ArgumentParser
+import AWSClientRuntime
+import AWSSNS
+import Foundation
+
+struct ExampleCommand: ParsableCommand {
+    @Argument(help: "The ARN of the Amazon SNS topic to subscribe to")
+    var arn: String
+    @Argument(help: "The email address to subscribe to the topic")
+    var email: String
+    @Option(help: "Name of the Amazon Region to use (default: us-east-1)")
+    var region = "us-east-1"
+
+    static var configuration = CommandConfiguration(
+        commandName: "subscribe-email",
+        abstract: """
+        Subscribes an email address to an Amazon SNS topic.
+        """,
+        discussion: """
+        """
+    )
+
+    /// Called by ``main()`` to run the bulk of the example.
+    func runAsync() async throws {
+        // snippet-start:[swift.sns.SubscribeEmail]
+        let config = try await SNSClient.SNSClientConfiguration(region: region)
+        let snsClient = SNSClient(config: config)
+
+        let output = try await snsClient.subscribe(
+            input: SubscribeInput(
+                endpoint: email,
+                protocol: "email",
+                returnSubscriptionArn: true,
+                topicArn: arn
+            )
+        )
+
+        guard let subscriptionArn = output.subscriptionArn else {
+            print("No subscription ARN received from Amazon SNS.")
+            return
+        }
+
+        print("Subscription \(subscriptionArn) created.")
+        // snippet-end:[swift.sns.SubscribeEmail]
+    }
+}
+
+/// The program's asynchronous entry point.
+@main
+struct Main {
+    static func main() async {
+        let args = Array(CommandLine.arguments.dropFirst())
+
+        do {
+            let command = try ExampleCommand.parse(args)
+            try await command.runAsync()
+        } catch {
+            ExampleCommand.exit(withError: error)
+        }
+    }
+}
diff --git a/swift/example_code/sns/SubscribeSMS/Package.swift b/swift/example_code/sns/SubscribeSMS/Package.swift
new file mode 100644
index 00000000000..fcc48c7a38f
--- /dev/null
+++ b/swift/example_code/sns/SubscribeSMS/Package.swift
@@ -0,0 +1,40 @@
+// swift-tools-version: 5.9
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// The swift-tools-version declares the minimum version of Swift required to
+// build this package.
+
+import PackageDescription
+
+let package = Package(
+    name: "subscribe-sms",
+    // Let Xcode know the minimum Apple platforms supported.
+    platforms: [
+        .macOS(.v13),
+        .iOS(.v15)
+    ],
+    dependencies: [
+        // Dependencies declare other packages that this package depends on.
+        .package(
+            url: "https://github.com/awslabs/aws-sdk-swift",
+            from: "1.0.0"),
+        .package(
+            url: "https://github.com/apple/swift-argument-parser.git",
+            branch: "main"
+        )
+    ],
+    targets: [
+        // Targets are the basic building blocks of a package, defining a module or a test suite.
+        // Targets can depend on other targets in this package and products
+        // from dependencies.
+        .executableTarget(
+            name: "subscribe-sms",
+            dependencies: [
+                .product(name: "AWSSNS", package: "aws-sdk-swift"),
+                .product(name: "ArgumentParser", package: "swift-argument-parser")
+            ],
+            path: "Sources")
+
+    ]
+)
diff --git a/swift/example_code/sns/SubscribeSMS/Sources/entry.swift b/swift/example_code/sns/SubscribeSMS/Sources/entry.swift
new file mode 100644
index 00000000000..5a1b521e474
--- /dev/null
+++ b/swift/example_code/sns/SubscribeSMS/Sources/entry.swift
@@ -0,0 +1,67 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// An example demonstrating how to subscribe a phone number to an Amazon Simple
+// Notification Service (SNS) topic so that it receives SMS text messages.
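
The SMS variant differs from the email example only in the endpoint and protocol; a minimal sketch, with an assumed helper function and an illustrative phone number, follows. SMS endpoints are typically given in E.164 format (for example, "+12065550100").

```
import AWSSNS

/// Subscribe a phone number to a topic for SMS delivery and return the
/// subscription ARN. (Hypothetical helper; the region default is an assumption.)
func subscribeSMS(_ phone: String, toTopic arn: String,
                  region: String = "us-east-1") async throws -> String? {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    let output = try await snsClient.subscribe(
        input: SubscribeInput(
            endpoint: phone,
            protocol: "sms",
            returnSubscriptionArn: true,
            topicArn: arn
        )
    )
    return output.subscriptionArn
}
```
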
+ +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The ARN of the Amazon SNS topic to subscribe to") + var arn: String + @Argument(help: "The phone number to subscribe to the topic") + var phone: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "subscribe-sms", + abstract: """ + Subscribes a phone number to receive text messages from an Amazon SNS topic. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.SubscribeSMS] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + let output = try await snsClient.subscribe( + input: SubscribeInput( + endpoint: phone, + protocol: "sms", + returnSubscriptionArn: true, + topicArn: arn + ) + ) + + guard let subscriptionArn = output.subscriptionArn else { + print("No subscription ARN received from Amazon SNS.") + return + } + + print("Subscription \(subscriptionArn) created.") + // snippet-end:[swift.sns.SubscribeSMS] + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sns/Unsubscribe/Package.swift b/swift/example_code/sns/Unsubscribe/Package.swift new file mode 100644 index 00000000000..45532bd83a7 --- /dev/null +++ b/swift/example_code/sns/Unsubscribe/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "unsubscribe", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "unsubscribe", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sns/Unsubscribe/Sources/entry.swift b/swift/example_code/sns/Unsubscribe/Sources/entry.swift new file mode 100644 index 00000000000..b7ba9bc81ab --- /dev/null +++ b/swift/example_code/sns/Unsubscribe/Sources/entry.swift @@ -0,0 +1,57 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to unsubscribe a subscriber from an Amazon Simple +// Notification Service (SNS) topic. 
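
A minimal sketch of the `Unsubscribe` call that the example below builds on; the wrapper function is an assumption for illustration.

```
import AWSSNS

/// Remove the subscription identified by the given subscription ARN.
/// (Hypothetical helper; the region default is an assumption.)
func unsubscribe(subscriptionArn: String, region: String = "us-east-1") async throws {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    _ = try await snsClient.unsubscribe(
        input: UnsubscribeInput(subscriptionArn: subscriptionArn)
    )
}
```
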
+ +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The ARN of the subscriber to unsubscribe") + var arn: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "unsubscribe", + abstract: """ + Unsubscribe a subscriber from an Amazon SNS topic. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.Unsubscribe] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + _ = try await snsClient.unsubscribe( + input: UnsubscribeInput( + subscriptionArn: arn + ) + ) + + print("Unsubscribed.") + // snippet-end:[swift.sns.Unsubscribe] + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sns/basics/Package.swift b/swift/example_code/sns/basics/Package.swift new file mode 100644 index 00000000000..d8a60c12d4e --- /dev/null +++ b/swift/example_code/sns/basics/Package.swift @@ -0,0 +1,42 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +// snippet-start:[swift.sns.basics.package] +import PackageDescription + +let package = Package( + name: "sns-basics", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "sns-basics", + dependencies: [ + .product(name: "AWSSNS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) +// snippet-end:[swift.sns.basics.package] diff --git a/swift/example_code/sns/basics/Sources/entry.swift b/swift/example_code/sns/basics/Sources/entry.swift new file mode 100644 index 00000000000..446fa749c8d --- /dev/null +++ b/swift/example_code/sns/basics/Sources/entry.swift @@ -0,0 +1,79 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to set up and use an Amazon Simple Notification +// Service client to list your available Amazon SNS topics. 
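
The listing example below uses the paginated form of `ListTopics`; a minimal sketch of that pattern, with an assumed helper function, is shown here. Each page is awaited from the paginator and its topic ARNs are collected.

```
import AWSSNS

/// Return the ARNs of all topics in the Region, using the paginated
/// ListTopics operation. (Hypothetical helper; the region default is an assumption.)
func listTopicARNs(region: String = "us-east-1") async throws -> [String] {
    let config = try await SNSClient.SNSClientConfiguration(region: region)
    let snsClient = SNSClient(config: config)

    var arns: [String] = []
    let pages = snsClient.listTopicsPaginated(input: ListTopicsInput())

    // Collect the ARN of every topic on every page of results.
    for try await page in pages {
        for topic in page.topics ?? [] {
            if let arn = topic.topicArn {
                arns.append(arn)
            }
        }
    }
    return arns
}
```
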
+ +// snippet-start:[swift.sns.basics.hello] +import ArgumentParser +import AWSClientRuntime +import AWSSNS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "sns-basics", + abstract: """ + This example shows how to list all of your available Amazon SNS topics. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sns.ListTopics] + let config = try await SNSClient.SNSClientConfiguration(region: region) + let snsClient = SNSClient(config: config) + + var topics: [String] = [] + let outputPages = snsClient.listTopicsPaginated( + input: ListTopicsInput() + ) + + // Each time a page of results arrives, process its contents. + + for try await output in outputPages { + guard let topicList = output.topics else { + print("Unable to get a page of Amazon SNS topics.") + return + } + + // Iterate over the topics listed on this page, adding their ARNs + // to the `topics` array. + + for topic in topicList { + guard let arn = topic.topicArn else { + print("Topic has no ARN.") + return + } + topics.append(arn) + } + } + // snippet-end:[swift.sns.ListTopics] + + print("You have \(topics.count) topics:") + for topic in topics { + print(" \(topic)") + } + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} +// snippet-end:[swift.sns.basics.hello] diff --git a/swift/example_code/sqs/CreateQueue/Package.swift b/swift/example_code/sqs/CreateQueue/Package.swift new file mode 100644 index 00000000000..bf475e99d8b --- /dev/null +++ b/swift/example_code/sqs/CreateQueue/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "createqueue", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "createqueue", + dependencies: [ + .product(name: "AWSSQS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sqs/CreateQueue/Sources/entry.swift b/swift/example_code/sqs/CreateQueue/Sources/entry.swift new file mode 100644 index 00000000000..afb2b83fe48 --- /dev/null +++ b/swift/example_code/sqs/CreateQueue/Sources/entry.swift @@ -0,0 +1,61 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
+// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to set up and use an Amazon Simple Queue +// Service client to create an available Amazon SQS queue. + +import ArgumentParser +import AWSClientRuntime +import AWSSQS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The name of the Amazon SQS queue to create") + var queueName: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "createqueue", + abstract: """ + This example shows how to create a new Amazon SQS queue. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sqs.CreateQueue] + let config = try await SQSClient.SQSClientConfiguration(region: region) + let sqsClient = SQSClient(config: config) + + let output = try await sqsClient.createQueue( + input: CreateQueueInput( + queueName: queueName + ) + ) + + guard let queueUrl = output.queueUrl else { + print("No queue URL returned.") + return + } + // snippet-end:[swift.sqs.CreateQueue] + print("Created queue named \(queueName) with URL \(queueUrl).") + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sqs/DeleteMessageBatch/Package.swift b/swift/example_code/sqs/DeleteMessageBatch/Package.swift new file mode 100644 index 00000000000..13a005cf19f --- /dev/null +++ b/swift/example_code/sqs/DeleteMessageBatch/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "deletemessages", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "deletemessages", + dependencies: [ + .product(name: "AWSSQS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sqs/DeleteMessageBatch/Sources/entry.swift b/swift/example_code/sqs/DeleteMessageBatch/Sources/entry.swift new file mode 100644 index 00000000000..d76157fe98b --- /dev/null +++ b/swift/example_code/sqs/DeleteMessageBatch/Sources/entry.swift @@ -0,0 +1,116 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to set up and use an Amazon Simple Queue +// Service client to delete messages from an Amazon SQS queue. 
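
The batch-deletion example below maps receipt handles to `DeleteMessageBatchRequestEntry` values and inspects the `failed` and `successful` lists in the response. A minimal sketch, using an assumed helper function, follows.

```
import AWSSQS

/// Delete a batch of received messages, identified by their receipt handles,
/// and return the IDs of any entries that could not be deleted.
/// (Hypothetical helper; the region default is an assumption.)
func deleteMessages(handles: [String], queueUrl: String,
                    region: String = "us-east-1") async throws -> [String] {
    let config = try await SQSClient.SQSClientConfiguration(region: region)
    let sqsClient = SQSClient(config: config)

    // Each entry needs a batch-unique ID; the handle's position is used here.
    let entries = handles.enumerated().map { (index, handle) in
        SQSClientTypes.DeleteMessageBatchRequestEntry(
            id: "\(index + 1)",
            receiptHandle: handle
        )
    }

    let output = try await sqsClient.deleteMessageBatch(
        input: DeleteMessageBatchInput(entries: entries, queueUrl: queueUrl)
    )
    return (output.failed ?? []).compactMap { $0.id }
}
```
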
+ +import ArgumentParser +import AWSClientRuntime +import AWSSQS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Option(help: "The URL of the Amazon SQS queue from which to delete messages") + var queue: String + @Argument(help: "Receipt handle(s) of the message(s) to delete") + var handles: [String] + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "deletemessages", + abstract: """ + This example shows how to delete a batch of messages from an Amazon SQS queue. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sqs.DeleteMessageBatch] + let config = try await SQSClient.SQSClientConfiguration(region: region) + let sqsClient = SQSClient(config: config) + + // Create the list of message entries. + + var entries: [SQSClientTypes.DeleteMessageBatchRequestEntry] = [] + var messageNumber = 1 + + for handle in handles { + let entry = SQSClientTypes.DeleteMessageBatchRequestEntry( + id: "\(messageNumber)", + receiptHandle: handle + ) + entries.append(entry) + messageNumber += 1 + } + + // Delete the messages. + + let output = try await sqsClient.deleteMessageBatch( + input: DeleteMessageBatchInput( + entries: entries, + queueUrl: queue + ) + ) + + // Get the lists of failed and successful deletions from the output. + + guard let failedEntries = output.failed else { + print("Failed deletion list is missing!") + return + } + guard let successfulEntries = output.successful else { + print("Successful deletion list is missing!") + return + } + + // Display a list of the failed deletions along with their + // corresponding explanation messages. + + if failedEntries.count != 0 { + print("Failed deletions:") + + for entry in failedEntries { + print("Message #\(entry.id ?? "") failed: \(entry.message ?? "")") + } + } else { + print("No failed deletions.") + } + + // Output a list of the message numbers that were successfully deleted. + + if successfulEntries.count != 0 { + var successes = "" + + for entry in successfulEntries { + if successes.count == 0 { + successes = entry.id ?? "" + } else { + successes = "\(successes), \(entry.id ?? "")" + } + } + print("Succeeded: ", successes) + } else { + print("No successful deletions.") + } + + // snippet-end:[swift.sqs.DeleteMessageBatch] + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} diff --git a/swift/example_code/sqs/DeleteQueue/Package.swift b/swift/example_code/sqs/DeleteQueue/Package.swift new file mode 100644 index 00000000000..37fc04c96dc --- /dev/null +++ b/swift/example_code/sqs/DeleteQueue/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "deletequeue", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. 
+ .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. + // Targets can depend on other targets in this package and products + // from dependencies. + .executableTarget( + name: "deletequeue", + dependencies: [ + .product(name: "AWSSQS", package: "aws-sdk-swift"), + .product(name: "ArgumentParser", package: "swift-argument-parser") + ], + path: "Sources") + + ] +) diff --git a/swift/example_code/sqs/DeleteQueue/Sources/entry.swift b/swift/example_code/sqs/DeleteQueue/Sources/entry.swift new file mode 100644 index 00000000000..4bb0a546e39 --- /dev/null +++ b/swift/example_code/sqs/DeleteQueue/Sources/entry.swift @@ -0,0 +1,61 @@ +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// An example demonstrating how to delete an Amazon SQS queue. + +// snippet-start:[swift.sqs.basics] +import ArgumentParser +import AWSClientRuntime +import AWSSQS +import Foundation + +struct ExampleCommand: ParsableCommand { + @Argument(help: "The URL of the Amazon SQS queue to delete") + var queueUrl: String + @Option(help: "Name of the Amazon Region to use (default: us-east-1)") + var region = "us-east-1" + + static var configuration = CommandConfiguration( + commandName: "deletequeue", + abstract: """ + This example shows how to delete an Amazon SQS queue. + """, + discussion: """ + """ + ) + + /// Called by ``main()`` to run the bulk of the example. + func runAsync() async throws { + // snippet-start:[swift.sqs.DeleteQueue] + let config = try await SQSClient.SQSClientConfiguration(region: region) + let sqsClient = SQSClient(config: config) + + do { + _ = try await sqsClient.deleteQueue( + input: DeleteQueueInput( + queueUrl: queueUrl + ) + ) + } catch _ as AWSSQS.QueueDoesNotExist { + print("Error: The specified queue doesn't exist.") + return + } + // snippet-end:[swift.sqs.DeleteQueue] + } +} + +/// The program's asynchronous entry point. +@main +struct Main { + static func main() async { + let args = Array(CommandLine.arguments.dropFirst()) + + do { + let command = try ExampleCommand.parse(args) + try await command.runAsync() + } catch { + ExampleCommand.exit(withError: error) + } + } +} +// snippet-end:[swift.sqs.basics] diff --git a/swift/example_code/sqs/GetQueueAttributes/Package.swift b/swift/example_code/sqs/GetQueueAttributes/Package.swift new file mode 100644 index 00000000000..a46032ef803 --- /dev/null +++ b/swift/example_code/sqs/GetQueueAttributes/Package.swift @@ -0,0 +1,40 @@ +// swift-tools-version: 5.9 +// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. +// SPDX-License-Identifier: Apache-2.0 +// +// The swift-tools-version declares the minimum version of Swift required to +// build this package. + +import PackageDescription + +let package = Package( + name: "getqueueattributes", + // Let Xcode know the minimum Apple platforms supported. + platforms: [ + .macOS(.v13), + .iOS(.v15) + ], + dependencies: [ + // Dependencies declare other packages that this package depends on. + .package( + url: "https://github.com/awslabs/aws-sdk-swift", + from: "1.0.0"), + .package( + url: "https://github.com/apple/swift-argument-parser.git", + branch: "main" + ) + ], + targets: [ + // Targets are the basic building blocks of a package, defining a module or a test suite. 
+        // Targets can depend on other targets in this package and products
+        // from dependencies.
+        .executableTarget(
+            name: "getqueueattributes",
+            dependencies: [
+                .product(name: "AWSSQS", package: "aws-sdk-swift"),
+                .product(name: "ArgumentParser", package: "swift-argument-parser")
+            ],
+            path: "Sources")
+
+    ]
+)
diff --git a/swift/example_code/sqs/GetQueueAttributes/Sources/entry.swift b/swift/example_code/sqs/GetQueueAttributes/Sources/entry.swift
new file mode 100644
index 00000000000..1255f3c7eb0
--- /dev/null
+++ b/swift/example_code/sqs/GetQueueAttributes/Sources/entry.swift
@@ -0,0 +1,75 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// An example demonstrating how to set up and use an Amazon Simple Queue
+// Service client to get the attributes of an available Amazon SQS queue.
+
+import ArgumentParser
+import AWSClientRuntime
+import AWSSQS
+import Foundation
+
+struct ExampleCommand: ParsableCommand {
+    @Argument(help: "The URL of the Amazon SQS queue to get the attributes of")
+    var url: String
+    @Option(help: "Name of the Amazon Region to use (default: us-east-1)")
+    var region = "us-east-1"
+
+    static var configuration = CommandConfiguration(
+        commandName: "getqueueattributes",
+        abstract: """
+        This example shows how to get an Amazon SQS queue's attributes.
+        """,
+        discussion: """
+        """
+    )
+
+    /// Called by ``main()`` to run the bulk of the example.
+    func runAsync() async throws {
+        // snippet-start:[swift.sqs.GetQueueAttributes]
+        let config = try await SQSClient.SQSClientConfiguration(region: region)
+        let sqsClient = SQSClient(config: config)
+
+        let output = try await sqsClient.getQueueAttributes(
+            input: GetQueueAttributesInput(
+                attributeNames: [
+                    .approximatenumberofmessages,
+                    .maximummessagesize
+                ],
+                queueUrl: url
+            )
+        )
+
+        guard let attributes = output.attributes else {
+            print("No queue attributes returned.")
+            return
+        }
+
+        for (attr, value) in attributes {
+            switch(attr) {
+            case "ApproximateNumberOfMessages":
+                print("Approximate message count: \(value)")
+            case "MaximumMessageSize":
+                print("Maximum message size: \(value) bytes")
+            default:
+                continue
+            }
+        }
+        // snippet-end:[swift.sqs.GetQueueAttributes]
+    }
+}
+
+/// The program's asynchronous entry point.
+@main
+struct Main {
+    static func main() async {
+        let args = Array(CommandLine.arguments.dropFirst())
+
+        do {
+            let command = try ExampleCommand.parse(args)
+            try await command.runAsync()
+        } catch {
+            ExampleCommand.exit(withError: error)
+        }
+    }
+}
diff --git a/swift/example_code/sqs/README.md b/swift/example_code/sqs/README.md
new file mode 100644
index 00000000000..f9a0e79e12b
--- /dev/null
+++ b/swift/example_code/sqs/README.md
@@ -0,0 +1,104 @@
+# Amazon SQS code examples for the SDK for Swift
+
+## Overview
+
+Shows how to use the AWS SDK for Swift to work with Amazon Simple Queue Service (Amazon SQS).
+
+
+
+_Amazon SQS is a fully managed message queuing service that makes it easy to decouple and scale microservices, distributed systems, and serverless applications._
+
+## ⚠ Important
+
+* Running this code might result in charges to your AWS account. For more details, see [AWS Pricing](https://aws.amazon.com/pricing/) and [Free Tier](https://aws.amazon.com/free/).
+* Running the tests might result in charges to your AWS account.
+* We recommend that you grant your code least privilege. At most, grant only the minimum permissions required to perform the task. For more information, see [Grant least privilege](https://docs.aws.amazon.com/IAM/latest/UserGuide/best-practices.html#grant-least-privilege).
+* This code is not tested in every AWS Region. For more information, see [AWS Regional Services](https://aws.amazon.com/about-aws/global-infrastructure/regional-product-services).
+
+
+
+## Code examples
+
+### Prerequisites
+
+For prerequisites, see the [README](../../README.md#Prerequisites) in the `swift` folder.
+
+
+
+### Get started
+
+- [Hello Amazon SQS](basics/Package.swift#L8) (`ListQueues`)
+
+
+### Single actions
+
+Code excerpts that show you how to call individual service functions.
+
+- [CreateQueue](CreateQueue/Sources/entry.swift#L29)
+- [DeleteMessageBatch](DeleteMessageBatch/Sources/entry.swift#L31)
+- [DeleteQueue](DeleteQueue/Sources/entry.swift#L29)
+- [GetQueueAttributes](GetQueueAttributes/Sources/entry.swift#L29)
+- [ListQueues](basics/Sources/entry.swift#L28)
+- [ReceiveMessage](ReceiveMessage/Sources/entry.swift#L31)
+- [SetQueueAttributes](SetQueueAttributes/Sources/entry.swift#L32)
+
+
+
+## Run the examples
+
+### Instructions
+
+To build any of these examples from a terminal window, navigate into its
+directory, then use the following command:
+
+```
+$ swift build
+```
+
+To build one of these examples in Xcode, navigate to the example's directory
+(such as the `basics` directory, to build that example). Then type `xed .`
+to open the example directory in Xcode. You can then use standard Xcode build
+and run commands.
+
+
+
+#### Hello Amazon SQS
+
+This example shows you how to get started using Amazon SQS.
+
+
+### Tests
+
+⚠ Running tests might result in charges to your AWS account.
+
+
+To find instructions for running these tests, see the [README](../../README.md#Tests)
+in the `swift` folder.
+
+
+
+## Additional resources
+
+- [Amazon SQS Developer Guide](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/SQSDeveloperGuide/welcome.html)
+- [Amazon SQS API Reference](https://docs.aws.amazon.com/AWSSimpleQueueService/latest/APIReference/Welcome.html)
+- [SDK for Swift Amazon SQS reference](https://sdk.amazonaws.com/swift/api/awssqs/latest/documentation/awssqs)
+
+
+
+---
+
+Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+
+SPDX-License-Identifier: Apache-2.0
diff --git a/swift/example_code/sqs/ReceiveMessage/Package.swift b/swift/example_code/sqs/ReceiveMessage/Package.swift
new file mode 100644
index 00000000000..12fac47b32e
--- /dev/null
+++ b/swift/example_code/sqs/ReceiveMessage/Package.swift
@@ -0,0 +1,40 @@
+// swift-tools-version: 5.9
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// The swift-tools-version declares the minimum version of Swift required to
+// build this package.
+
+import PackageDescription
+
+let package = Package(
+    name: "receivemessage",
+    // Let Xcode know the minimum Apple platforms supported.
+    platforms: [
+        .macOS(.v13),
+        .iOS(.v15)
+    ],
+    dependencies: [
+        // Dependencies declare other packages that this package depends on.
+        .package(
+            url: "https://github.com/awslabs/aws-sdk-swift",
+            from: "1.0.0"),
+        .package(
+            url: "https://github.com/apple/swift-argument-parser.git",
+            branch: "main"
+        )
+    ],
+    targets: [
+        // Targets are the basic building blocks of a package, defining a module or a test suite.
+        // Targets can depend on other targets in this package and products
+        // from dependencies.
+        .executableTarget(
+            name: "receivemessage",
+            dependencies: [
+                .product(name: "AWSSQS", package: "aws-sdk-swift"),
+                .product(name: "ArgumentParser", package: "swift-argument-parser")
+            ],
+            path: "Sources")
+
+    ]
+)
diff --git a/swift/example_code/sqs/ReceiveMessage/Sources/entry.swift b/swift/example_code/sqs/ReceiveMessage/Sources/entry.swift
new file mode 100644
index 00000000000..73433736dcf
--- /dev/null
+++ b/swift/example_code/sqs/ReceiveMessage/Sources/entry.swift
@@ -0,0 +1,71 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// An example demonstrating how to set up and use an Amazon Simple Queue
+// Service client to receive messages from an available Amazon SQS queue.
+
+import ArgumentParser
+import AWSClientRuntime
+import AWSSQS
+import Foundation
+
+struct ExampleCommand: ParsableCommand {
+    @Option(help: "The maximum number of messages to receive")
+    var maxMessages = 10
+    @Argument(help: "The URL of the Amazon SQS queue to receive messages from")
+    var url: String
+    @Option(help: "Name of the Amazon Region to use (default: us-east-1)")
+    var region = "us-east-1"
+
+    static var configuration = CommandConfiguration(
+        commandName: "receivemessage",
+        abstract: """
+        This example shows how to receive messages from an Amazon SQS queue.
+        """,
+        discussion: """
+        """
+    )
+
+    /// Called by ``main()`` to run the bulk of the example.
+    func runAsync() async throws {
+        // snippet-start:[swift.sqs.ReceiveMessage]
+        let config = try await SQSClient.SQSClientConfiguration(region: region)
+        let sqsClient = SQSClient(config: config)
+
+        let output = try await sqsClient.receiveMessage(
+            input: ReceiveMessageInput(
+                maxNumberOfMessages: maxMessages,
+                queueUrl: url
+            )
+        )
+
+        guard let messages = output.messages else {
+            print("No messages received.")
+            return
+        }
+
+        for message in messages {
+            print("Message ID: \(message.messageId ?? "")")
+            print("Receipt handle: \(message.receiptHandle ?? "")")
+            print(message.body ?? "")
+            print("---")
+        }
+
+        // snippet-end:[swift.sqs.ReceiveMessage]
+    }
+}
+
+/// The program's asynchronous entry point.
+@main
+struct Main {
+    static func main() async {
+        let args = Array(CommandLine.arguments.dropFirst())
+
+        do {
+            let command = try ExampleCommand.parse(args)
+            try await command.runAsync()
+        } catch {
+            ExampleCommand.exit(withError: error)
+        }
+    }
+}
diff --git a/swift/example_code/sqs/SetQueueAttributes/Package.swift b/swift/example_code/sqs/SetQueueAttributes/Package.swift
new file mode 100644
index 00000000000..991a4a13f4b
--- /dev/null
+++ b/swift/example_code/sqs/SetQueueAttributes/Package.swift
@@ -0,0 +1,40 @@
+// swift-tools-version: 5.9
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// The swift-tools-version declares the minimum version of Swift required to
+// build this package.
+
+import PackageDescription
+
+let package = Package(
+    name: "configqueue",
+    // Let Xcode know the minimum Apple platforms supported.
+    platforms: [
+        .macOS(.v13),
+        .iOS(.v15)
+    ],
+    dependencies: [
+        // Dependencies declare other packages that this package depends on.
+        .package(
+            url: "https://github.com/awslabs/aws-sdk-swift",
+            from: "1.0.0"),
+        .package(
+            url: "https://github.com/apple/swift-argument-parser.git",
+            branch: "main"
+        )
+    ],
+    targets: [
+        // Targets are the basic building blocks of a package, defining a module or a test suite.
+        // Targets can depend on other targets in this package and products
+        // from dependencies.
+        .executableTarget(
+            name: "configqueue",
+            dependencies: [
+                .product(name: "AWSSQS", package: "aws-sdk-swift"),
+                .product(name: "ArgumentParser", package: "swift-argument-parser")
+            ],
+            path: "Sources")
+
+    ]
+)
diff --git a/swift/example_code/sqs/SetQueueAttributes/Sources/entry.swift b/swift/example_code/sqs/SetQueueAttributes/Sources/entry.swift
new file mode 100644
index 00000000000..5477f331dba
--- /dev/null
+++ b/swift/example_code/sqs/SetQueueAttributes/Sources/entry.swift
@@ -0,0 +1,65 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// An example demonstrating how to set up and use an Amazon Simple Queue
+// Service client to set the attributes of an available Amazon SQS queue.
+
+import ArgumentParser
+import AWSClientRuntime
+import AWSSQS
+import Foundation
+
+struct ExampleCommand: ParsableCommand {
+    @Argument(help: "The URL of the Amazon SQS queue to set attributes of")
+    var url: String
+    @Option(help: "Maximum size of a message in bytes, from 1024 to 262144")
+    var maxSize: Int
+    @Option(help: "Name of the Amazon Region to use (default: us-east-1)")
+    var region = "us-east-1"
+
+    static var configuration = CommandConfiguration(
+        commandName: "configqueue",
+        abstract: """
+        This example shows how to set attributes of an Amazon
+        SQS queue, using the SQS client's setQueueAttributes() function.
+        """,
+        discussion: """
+        """
+    )
+
+    /// Called by ``main()`` to run the bulk of the example.
+    func runAsync() async throws {
+        // snippet-start:[swift.sqs.SetQueueAttributes]
+        let config = try await SQSClient.SQSClientConfiguration(region: region)
+        let sqsClient = SQSClient(config: config)
+
+        do {
+            _ = try await sqsClient.setQueueAttributes(
+                input: SetQueueAttributesInput(
+                    attributes: [
+                        "MaximumMessageSize": "\(maxSize)"
+                    ],
+                    queueUrl: url
+                )
+            )
+        } catch _ as AWSSQS.InvalidAttributeValue {
+            print("Invalid maximum message size: \(maxSize) bytes.")
+        }
+        // snippet-end:[swift.sqs.SetQueueAttributes]
+    }
+}
+
+/// The program's asynchronous entry point.
+@main
+struct Main {
+    static func main() async {
+        let args = Array(CommandLine.arguments.dropFirst())
+
+        do {
+            let command = try ExampleCommand.parse(args)
+            try await command.runAsync()
+        } catch {
+            ExampleCommand.exit(withError: error)
+        }
+    }
+}
diff --git a/swift/example_code/sqs/basics/Package.swift b/swift/example_code/sqs/basics/Package.swift
new file mode 100644
index 00000000000..c8cab043cef
--- /dev/null
+++ b/swift/example_code/sqs/basics/Package.swift
@@ -0,0 +1,42 @@
+// swift-tools-version: 5.9
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// The swift-tools-version declares the minimum version of Swift required to
+// build this package.
+
+// snippet-start:[swift.sqs.basics.package]
+import PackageDescription
+
+let package = Package(
+    name: "sqs-basics",
+    // Let Xcode know the minimum Apple platforms supported.
+    platforms: [
+        .macOS(.v13),
+        .iOS(.v15)
+    ],
+    dependencies: [
+        // Dependencies declare other packages that this package depends on.
+        .package(
+            url: "https://github.com/awslabs/aws-sdk-swift",
+            from: "1.0.0"),
+        .package(
+            url: "https://github.com/apple/swift-argument-parser.git",
+            branch: "main"
+        )
+    ],
+    targets: [
+        // Targets are the basic building blocks of a package, defining a module or a test suite.
+        // Targets can depend on other targets in this package and products
+        // from dependencies.
+        .executableTarget(
+            name: "sqs-basics",
+            dependencies: [
+                .product(name: "AWSSQS", package: "aws-sdk-swift"),
+                .product(name: "ArgumentParser", package: "swift-argument-parser")
+            ],
+            path: "Sources")
+
+    ]
+)
+// snippet-end:[swift.sqs.basics.package]
diff --git a/swift/example_code/sqs/basics/Sources/entry.swift b/swift/example_code/sqs/basics/Sources/entry.swift
new file mode 100644
index 00000000000..39fa21499ab
--- /dev/null
+++ b/swift/example_code/sqs/basics/Sources/entry.swift
@@ -0,0 +1,75 @@
+// Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
+// SPDX-License-Identifier: Apache-2.0
+//
+// An example demonstrating how to set up and use an Amazon Simple Queue
+// Service client to list your available Amazon SQS queues.
+
+// snippet-start:[swift.sqs.basics]
+import ArgumentParser
+import AWSClientRuntime
+import AWSSQS
+import Foundation
+
+struct ExampleCommand: ParsableCommand {
+    @Option(help: "Name of the Amazon Region to use (default: us-east-1)")
+    var region = "us-east-1"
+
+    static var configuration = CommandConfiguration(
+        commandName: "sqs-basics",
+        abstract: """
+        This example shows how to list all of your available Amazon SQS queues.
+        """,
+        discussion: """
+        """
+    )
+
+    /// Called by ``main()`` to run the bulk of the example.
+    func runAsync() async throws {
+        // snippet-start:[swift.sqs.ListQueues]
+        let config = try await SQSClient.SQSClientConfiguration(region: region)
+        let sqsClient = SQSClient(config: config)
+
+        var queues: [String] = []
+        let outputPages = sqsClient.listQueuesPaginated(
+            input: ListQueuesInput()
+        )
+
+        // Each time a page of results arrives, process its contents.
+
+        for try await output in outputPages {
+            guard let urls = output.queueUrls else {
+                print("No queues found.")
+                return
+            }
+
+            // Iterate over the queue URLs listed on this page, adding them
+            // to the `queues` array.
+
+            for queueUrl in urls {
+                queues.append(queueUrl)
+            }
+        }
+        // snippet-end:[swift.sqs.ListQueues]
+
+        print("You have \(queues.count) queues:")
+        for queue in queues {
+            print("  \(queue)")
+        }
+    }
+}
+
+/// The program's asynchronous entry point.
+@main
+struct Main {
+    static func main() async {
+        let args = Array(CommandLine.arguments.dropFirst())
+
+        do {
+            let command = try ExampleCommand.parse(args)
+            try await command.runAsync()
+        } catch {
+            ExampleCommand.exit(withError: error)
+        }
+    }
+}
+// snippet-end:[swift.sqs.basics]
diff --git a/swift/example_code/transcribe-streaming/README.md b/swift/example_code/transcribe-streaming/README.md
index 5b39607d810..2c8f025d187 100644
--- a/swift/example_code/transcribe-streaming/README.md
+++ b/swift/example_code/transcribe-streaming/README.md
@@ -33,7 +33,7 @@ For prerequisites, see the [README](../../README.md#Prerequisites) in the `swift
 
 Code excerpts that show you how to call individual service functions.
 
-- [StartStreamTranscription](transcribe-events/Sources/entry.swift#L145)
+- [StartStreamTranscription](transcribe-events/Sources/entry.swift#L132)
 
 
 ### Scenarios
diff --git a/swift/example_code/transcribe-streaming/transcribe-events/Sources/entry.swift b/swift/example_code/transcribe-streaming/transcribe-events/Sources/entry.swift
index 5bf9572df95..b1f68259793 100644
--- a/swift/example_code/transcribe-streaming/transcribe-events/Sources/entry.swift
+++ b/swift/example_code/transcribe-streaming/transcribe-events/Sources/entry.swift
@@ -126,26 +126,13 @@ struct ExampleCommand: ParsableCommand {
     /// Run the transcription process.
     ///
     /// - Throws: An error from `TranscribeError`.
-    func transcribe() async throws {
-        // Convert the value of the `--format` option into the Transcribe
-        // Streaming `MediaEncoding` type.
-
-        let mediaEncoding: TranscribeStreamingClientTypes.MediaEncoding
-        switch format {
-        case .flac:
-            mediaEncoding = .flac
-        case .ogg:
-            mediaEncoding = .oggOpus
-        case .pcm:
-            mediaEncoding = .pcm
-        }
-
+    func transcribe(encoding: TranscribeStreamingClientTypes.MediaEncoding) async throws {
         // Create the Transcribe Streaming client.
 
         // snippet-start:[swift.transcribe-streaming.StartStreamTranscription]
         let client = TranscribeStreamingClient(
             config: try await TranscribeStreamingClient.TranscribeStreamingClientConfiguration(
-                region: region
+                region: region
             )
         )
 
@@ -155,7 +142,7 @@ struct ExampleCommand: ParsableCommand {
             input: StartStreamTranscriptionInput(
                 audioStream: try await createAudioStream(),
                 languageCode: TranscribeStreamingClientTypes.LanguageCode(rawValue: lang),
-                mediaEncoding: mediaEncoding,
+                mediaEncoding: encoding,
                 mediaSampleRateHertz: sampleRate
             )
         )
@@ -200,6 +187,26 @@ struct ExampleCommand: ParsableCommand {
         }
     }
     // snippet-end:[swift.transcribe-streaming]
+
+    /// Convert the value of the `--format` command line option into the
+    /// corresponding Transcribe Streaming `MediaEncoding` type.
+    ///
+    /// - Returns: The `MediaEncoding` equivalent of the format specified on
+    ///   the command line.
+    func getMediaEncoding() -> TranscribeStreamingClientTypes.MediaEncoding {
+        let mediaEncoding: TranscribeStreamingClientTypes.MediaEncoding
+
+        switch format {
+        case .flac:
+            mediaEncoding = .flac
+        case .ogg:
+            mediaEncoding = .oggOpus
+        case .pcm:
+            mediaEncoding = .pcm
+        }
+
+        return mediaEncoding
+    }
 }
 
 // -MARK: - Entry point
@@ -212,7 +219,7 @@ struct Main {
 
         do {
             let command = try ExampleCommand.parse(args)
-            try await command.transcribe()
+            try await command.transcribe(encoding: command.getMediaEncoding())
         } catch let error as TranscribeError {
             print("ERROR: \(error.errorDescription ?? "Unknown error")")
         } catch {