diff --git a/.DS_Store b/.DS_Store index a273ef7..68e823e 100644 Binary files a/.DS_Store and b/.DS_Store differ diff --git a/.gitignore b/Import Service/.gitignore similarity index 100% rename from .gitignore rename to Import Service/.gitignore diff --git a/Import Service/README.md b/Import Service/README.md new file mode 100644 index 0000000..c53f0b5 --- /dev/null +++ b/Import Service/README.md @@ -0,0 +1,58 @@ + +# Welcome to your CDK Python project! + +This is a blank project for CDK development with Python. + +The `cdk.json` file tells the CDK Toolkit how to execute your app. + +This project is set up like a standard Python project. The initialization +process also creates a virtualenv within this project, stored under the `.venv` +directory. To create the virtualenv it assumes that there is a `python3` +(or `python` for Windows) executable in your path with access to the `venv` +package. If for any reason the automatic creation of the virtualenv fails, +you can create the virtualenv manually. + +To manually create a virtualenv on MacOS and Linux: + +``` +$ python3 -m venv .venv +``` + +After the init process completes and the virtualenv is created, you can use the following +step to activate your virtualenv. + +``` +$ source .venv/bin/activate +``` + +If you are a Windows platform, you would activate the virtualenv like this: + +``` +% .venv\Scripts\activate.bat +``` + +Once the virtualenv is activated, you can install the required dependencies. + +``` +$ pip install -r requirements.txt +``` + +At this point you can now synthesize the CloudFormation template for this code. + +``` +$ cdk synth +``` + +To add additional dependencies, for example other CDK libraries, just add +them to your `setup.py` file and rerun the `pip install -r requirements.txt` +command. 
#!/usr/bin/env python3
"""CDK entry point: synthesizes the Import Service stack."""
import os

from aws_cdk import App
from import_service.import_service_stack import ImportServiceStack

# Deploy into whatever account/region the CDK CLI is currently configured for.
deploy_env = {
    'account': os.getenv('CDK_DEFAULT_ACCOUNT'),
    'region': os.getenv('CDK_DEFAULT_REGION'),
}

app = App()
ImportServiceStack(app, "ImportServiceStack", env=deploy_env)
app.synth()
"@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true, + "@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true, + "@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true, + "@aws-cdk/aws-route53-patters:useCertificate": true, + "@aws-cdk/customresources:installLatestAwsSdkDefault": false, + "@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true, + "@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true, + "@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true, + "@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true, + "@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true, + "@aws-cdk/aws-redshift:columnId": true, + "@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true, + "@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true, + "@aws-cdk/aws-apigateway:requestValidatorUniqueId": true, + "@aws-cdk/aws-kms:aliasNameRef": true, + "@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true, + "@aws-cdk/core:includePrefixInUniqueNameGeneration": true, + "@aws-cdk/aws-efs:denyAnonymousAccess": true, + "@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true, + "@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true, + "@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true, + "@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true, + "@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true, + "@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true, + "@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true, + "@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true, + "@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true, + "@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true, + "@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true, + "@aws-cdk/aws-eks:nodegroupNameAttribute": true, + 
from aws_cdk import (
    aws_apigateway as apigateway,
    aws_lambda as _lambda,
    CfnOutput
)
from constructs import Construct


def create_api_gateway(scope: Construct,
                       import_products_lambda: _lambda.Function) -> apigateway.RestApi:
    """Create the Import Service REST API.

    Exposes GET /import (with a required ``name`` query parameter) backed by
    the given Lambda, enables CORS for browser clients, and emits the API URL
    as a CloudFormation output.
    """
    cors = apigateway.CorsOptions(
        allow_origins=apigateway.Cors.ALL_ORIGINS,
        allow_methods=apigateway.Cors.ALL_METHODS,
        allow_headers=['Content-Type', 'X-Amz-Date',
                       'Authorization', 'X-Api-Key'],
        allow_credentials=True,
    )

    api = apigateway.RestApi(
        scope, 'ImportApi',
        rest_api_name='Import Service API',
        description='API Gateway for Import Service',
        default_cors_preflight_options=cors,
    )

    # GET /import?name=<file> — API Gateway rejects requests missing `name`
    # before they ever reach the Lambda.
    import_resource = api.root.add_resource('import')
    import_resource.add_method(
        'GET',
        apigateway.LambdaIntegration(import_products_lambda),
        request_parameters={
            'method.request.querystring.name': True
        },
        request_validator_options=apigateway.RequestValidatorOptions(
            validate_request_parameters=True
        ),
    )

    # Surface the deployed endpoint after `cdk deploy`.
    CfnOutput(
        scope, 'ApiUrl',
        value=api.url,
        description='API Gateway URL'
    )

    return api
from aws_cdk import (
    Stack,
    aws_s3 as s3,
)
from constructs import Construct

from .api_gateway import create_api_gateway
from .import_products_lambda import create_import_products_lambda
from .parse_products_lambda import create_parse_products_lambda


class ImportServiceStack(Stack):
    """Import Service: presigned-upload API plus an S3-triggered CSV parser.

    Wires together:
      * an existing S3 bucket (referenced by name, not created/owned here),
      * the importProductsFile Lambda that issues presigned upload URLs,
      * the importFileParser Lambda triggered by uploads, and
      * the REST API fronting the import Lambda.
    """

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Reference the pre-existing bucket; this stack does not manage its
        # lifecycle. NOTE(review): the bucket name is hard-coded — consider
        # promoting it to a context value or environment variable.
        import_bucket = s3.Bucket.from_bucket_name(
            self, 'ImportBucket',
            bucket_name='myimportservicebucket'
        )

        import_products_lambda = create_import_products_lambda(
            self,
            import_bucket
        )

        # Invoked for its side effects (Lambda + S3 event notification);
        # the construct handle itself is not needed afterwards.
        create_parse_products_lambda(self, import_bucket)

        # The API registers itself on the stack by construction as well.
        create_api_gateway(self, import_products_lambda)
import json
import os
from urllib.parse import unquote

# CORS headers shared by the 200 and 400 responses from this endpoint.
_CORS_HEADERS = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Credentials': True,
    'Access-Control-Allow-Methods': 'OPTIONS,GET,POST,PUT,DELETE',
}


def lambda_handler(event, context):
    """Return a presigned S3 PUT URL for uploading ``uploaded/<name>``.

    Expects a ``name`` query-string parameter (the CSV file name).
    Responses:
      * 400 when ``name`` is absent (previously this raised KeyError and
        surfaced as a 500, contradicting the service's unit tests),
      * 200 with the JSON-encoded signed URL on success (JSON-encoded so
        clients can uniformly ``response.json()`` — the error paths already
        return JSON),
      * 500 on unexpected errors.
    """
    try:
        params = event.get('queryStringParameters') or {}
        file_name = params.get('name')
        if not file_name:
            return {
                'statusCode': 400,
                'headers': _CORS_HEADERS,
                'body': json.dumps({'error': 'Missing name parameter'})
            }

        # Decode the filename in case it's URL encoded.
        file_name = unquote(file_name)

        # Imported lazily so the validation path (and unit tests) do not
        # require the AWS SDK to be importable.
        import boto3
        from botocore.config import Config

        # SigV4 is required for presigned URLs in most regions.
        config = Config(
            signature_version='v4',
            region_name=os.environ['AWS_REGION']
        )
        s3_client = boto3.client('s3', config=config)

        bucket_name = os.environ['BUCKET_NAME']
        key = f'uploaded/{file_name}'

        signed_url = s3_client.generate_presigned_url(
            'put_object',
            Params={
                'Bucket': bucket_name,
                'Key': key,
                'ContentType': 'text/csv'
            },
            ExpiresIn=3600  # URL expires in 1 hour
        )

        return {
            'statusCode': 200,
            'headers': _CORS_HEADERS,
            'body': json.dumps(signed_url)
        }

    except Exception as e:
        return {
            'statusCode': 500,
            'headers': {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': True
            },
            'body': json.dumps({'error': str(e)})
        }
import unittest
from unittest.mock import patch, MagicMock
import json

# NOTE: `lambda` is a reserved keyword in Python, so the handlers live in the
# `lambda_func` package — `from lambda.import_file_parser import ...` is a
# SyntaxError and can never run.
from import_service.lambda_func.import_file_parser import lambda_handler

# Single definition of the patch target so every test stays in sync.
_MODULE = 'import_service.lambda_func.import_file_parser'


def _make_event(bucket: str, key: str) -> dict:
    """Build a minimal S3 ObjectCreated event for the given object."""
    return {
        'Records': [{
            's3': {
                'bucket': {'name': bucket},
                'object': {'key': key}
            }
        }]
    }


class TestImportFileParser(unittest.TestCase):
    """Unit tests for the importFileParser Lambda handler."""

    @patch(_MODULE + '.boto3.client')
    def test_successful_csv_processing(self, mock_boto3_client):
        # Mock the S3 client and a two-row CSV payload.
        mock_s3_client = MagicMock()
        mock_boto3_client.return_value = mock_s3_client
        mock_s3_client.get_object.return_value = {
            'Body': MagicMock(
                read=lambda: b'title,description,price\nProduct1,Desc1,10.99\nProduct2,Desc2,20.99'
            )
        }

        response = lambda_handler(
            _make_event('myimportservicebucket', 'uploaded/test.csv'), {}
        )

        self.assertEqual(response['statusCode'], 200)

        # The object is read once from the uploaded/ prefix...
        mock_s3_client.get_object.assert_called_once_with(
            Bucket='myimportservicebucket',
            Key='uploaded/test.csv'
        )
        # ...copied to the parsed/ prefix...
        mock_s3_client.copy_object.assert_called_once_with(
            Bucket='myimportservicebucket',
            CopySource={'Bucket': 'myimportservicebucket', 'Key': 'uploaded/test.csv'},
            Key='parsed/test.csv'
        )
        # ...and the original is deleted.
        mock_s3_client.delete_object.assert_called_once_with(
            Bucket='myimportservicebucket',
            Key='uploaded/test.csv'
        )

    @patch(_MODULE + '.boto3.client')
    def test_invalid_csv_format(self, mock_boto3_client):
        # A body that is not valid UTF-8 makes decoding (and hence parsing)
        # fail. (Arbitrary-but-decodable text would be accepted by
        # csv.DictReader and returned 200, so it cannot drive the error path
        # the original version of this test expected.)
        mock_s3_client = MagicMock()
        mock_boto3_client.return_value = mock_s3_client
        mock_s3_client.get_object.return_value = {
            'Body': MagicMock(read=lambda: b'\xff\xfe\x00 not utf-8')
        }

        response = lambda_handler(_make_event('test-bucket', 'uploaded/test.csv'), {})

        self.assertEqual(response['statusCode'], 500)

    @patch(_MODULE + '.boto3.client')
    def test_s3_error_handling(self, mock_boto3_client):
        # The S3 client raising should surface as a 500 with the message
        # embedded in the (string) JSON body.
        mock_s3_client = MagicMock()
        mock_boto3_client.return_value = mock_s3_client
        mock_s3_client.get_object.side_effect = Exception('S3 Error')

        response = lambda_handler(_make_event('test-bucket', 'uploaded/test.csv'), {})

        self.assertEqual(response['statusCode'], 500)
        # The handler's error body is a JSON *string*, not an object, so
        # indexing json.loads(...)['error'] (as before) would raise TypeError.
        self.assertIn('S3 Error', json.loads(response['body']))
import aws_cdk as core
import aws_cdk.assertions as assertions

from import_service.import_service_stack import ImportServiceStack


# Example test scaffold from `cdk init`. It currently only checks that the
# stack synthesizes; enable the resource assertion below once the matching
# example resource exists in import_service/import_service_stack.py.
def test_sqs_queue_created():
    """Smoke test: the stack synthesizes into a CloudFormation template."""
    application = core.App()
    service_stack = ImportServiceStack(application, "import-service")
    template = assertions.Template.from_stack(service_stack)

    # template.has_resource_properties("AWS::SQS::Queue", {
    #     "VisibilityTimeout": 300
    # })
from aws_cdk import (
    Stack,
    aws_apigateway as apigw,
    CfnOutput
)
from constructs import Construct


class ApiGatewayStack(Stack):
    """REST API for the Products service.

    Routes:
      * GET  /products             -> lambda_stack.list_products_function
      * POST /products             -> lambda_stack.product_function (201)
      * GET  /products/{productId} -> lambda_stack.get_product_function (404able)

    Every method carries CORS response headers, and the endpoint URLs are
    emitted as CloudFormation outputs.

    The previous, superseded implementation that was kept above the class as
    a ~120-line commented-out block has been removed; version control is the
    place for old code.
    """

    def __init__(self, scope: Construct, construct_id: str, lambda_stack, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # REST API with CORS preflight enabled on every resource.
        api = apigw.RestApi(
            self, 'ProductsApi',
            rest_api_name='Products Service',
            default_cors_preflight_options=apigw.CorsOptions(
                allow_origins=apigw.Cors.ALL_ORIGINS,
                allow_methods=apigw.Cors.ALL_METHODS,
                allow_headers=[
                    'Content-Type',
                    'X-Amz-Date',
                    'Authorization',
                    'X-Api-Key',
                    'X-Amz-Security-Token',
                ],
            )
        )

        # Common response configurations shared by all methods.
        cors_response_parameters = {
            'method.response.header.Access-Control-Allow-Origin': "'*'"
        }

        success_response = {
            'statusCode': '200',
            'responseParameters': cors_response_parameters
        }

        error_responses = [
            {
                'statusCode': '400',
                'responseParameters': cors_response_parameters
            },
            {
                'statusCode': '500',
                'responseParameters': cors_response_parameters
            }
        ]

        method_responses = [
            {
                'statusCode': status,
                'responseParameters': {
                    'method.response.header.Access-Control-Allow-Origin': True
                }
            }
            for status in ('200', '400', '500')
        ]

        products = api.root.add_resource('products')

        # GET /products
        products.add_method(
            'GET',
            apigw.LambdaIntegration(
                lambda_stack.list_products_function,
                proxy=True,
                integration_responses=[success_response, *error_responses]
            ),
            method_responses=method_responses
        )

        # POST /products — success is 201 Created rather than 200.
        # NOTE(review): assumes `lambda_stack.product_function` is the
        # create-product Lambda; confirm the attribute name on the Lambda stack.
        products.add_method(
            'POST',
            apigw.LambdaIntegration(
                lambda_stack.product_function,
                proxy=True,
                integration_responses=[
                    {
                        'statusCode': '201',
                        'responseParameters': cors_response_parameters
                    },
                    *error_responses
                ]
            ),
            method_responses=[
                {
                    'statusCode': '201',
                    'responseParameters': {
                        'method.response.header.Access-Control-Allow-Origin': True
                    }
                },
                *method_responses[1:]  # include the 400 and 500 responses
            ]
        )

        # GET /products/{productId} — adds a 404 for unknown ids.
        product = products.add_resource('{productId}')
        product.add_method(
            'GET',
            apigw.LambdaIntegration(
                lambda_stack.get_product_function,
                proxy=True,
                integration_responses=[
                    success_response,
                    {
                        'statusCode': '404',
                        'responseParameters': cors_response_parameters
                    },
                    *error_responses
                ]
            ),
            method_responses=[
                *method_responses,
                {
                    'statusCode': '404',
                    'responseParameters': {
                        'method.response.header.Access-Control-Allow-Origin': True
                    }
                }
            ]
        )

        # Endpoint URLs as CloudFormation outputs.
        CfnOutput(
            self, "APIGatewayURL",
            value=f"{api.url}products",
            description="API Gateway endpoint URL"
        )
        CfnOutput(
            self, "GetProductsURL",
            value=f"{api.url}products",
            description="GET products endpoint URL"
        )
        CfnOutput(
            self, "CreateProductURL",
            value=f"{api.url}products",
            description="POST product endpoint URL"
        )
        CfnOutput(
            self, "GetProductByIdURL",
            value=f"{api.url}products/{{productId}}",
            description="GET product by ID endpoint URL"
        )
constructs import Construct -def create_get_product_lambda(scope: Construct, id: str) -> _lambda.Function: +def create_get_product_lambda(scope: Construct, + id: str, environment: dict, role: None) -> _lambda.Function: return _lambda.Function( scope, id, runtime=_lambda.Runtime.PYTHON_3_9, handler='product_by_id.handler', - code=_lambda.Code.from_asset('product_service/lambda_func') + code=_lambda.Code.from_asset('product_service/lambda_func'), + environment=environment or {} ) diff --git a/product_service/get_products.py b/Product_Service2/product_service/get_products.py similarity index 65% rename from product_service/get_products.py rename to Product_Service2/product_service/get_products.py index ec81065..5855c7c 100644 --- a/product_service/get_products.py +++ b/Product_Service2/product_service/get_products.py @@ -3,11 +3,12 @@ ) from constructs import Construct -def create_list_products_lambda(scope: Construct, id: str) -> _lambda.Function: +def create_list_products_lambda(scope: Construct, id: str, environment: dict, role: None) -> _lambda.Function: return _lambda.Function( scope, id, runtime=_lambda.Runtime.PYTHON_3_9, handler='product_list.handler', - code=_lambda.Code.from_asset('product_service/lambda_func') + code=_lambda.Code.from_asset('product_service/lambda_func'), + environment=environment or {} ) diff --git a/README.md b/Product_Service2/product_service/lambda_func/__init__.py similarity index 100% rename from README.md rename to Product_Service2/product_service/lambda_func/__init__.py diff --git a/Product_Service2/product_service/lambda_func/create_product.py b/Product_Service2/product_service/lambda_func/create_product.py new file mode 100644 index 0000000..90c6cc3 --- /dev/null +++ b/Product_Service2/product_service/lambda_func/create_product.py @@ -0,0 +1,123 @@ +import json +import os +import boto3 +import uuid +from decimal import Decimal +from typing import Dict, Any + +class DecimalEncoder(json.JSONEncoder): + def default(self, obj): + if 
class DecimalEncoder(json.JSONEncoder):
    """JSON encoder that renders DynamoDB Decimal values as floats."""

    def default(self, obj):
        if isinstance(obj, Decimal):
            return float(obj)
        return super().default(obj)


def create_response(status_code: int, body: Any) -> Dict[str, Any]:
    """Build an API Gateway proxy response with CORS headers for POST."""
    return {
        'statusCode': status_code,
        'headers': {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token',
            'Access-Control-Allow-Methods': 'OPTIONS,POST'
        },
        'body': json.dumps(body, cls=DecimalEncoder)
    }


def validate_product_data(data: Dict) -> tuple[bool, str]:
    """Validate a create-product request body.

    Returns:
        ``(True, "")`` when valid, otherwise ``(False, <reason>)``.
    """
    if not data:
        return False, "Request body is empty"

    for field in ('title', 'description', 'price', 'count'):
        if field not in data:
            return False, f"Missing required field: {field}"

    # BUG FIX: bool is a subclass of int, so ``"price": true`` and
    # ``"count": true`` passed the original isinstance checks. Reject bools.
    price = data['price']
    if isinstance(price, bool) or not isinstance(price, (int, float)) or price <= 0:
        return False, "Price must be a positive number"

    count = data['count']
    if isinstance(count, bool) or not isinstance(count, int) or count < 0:
        return False, "Count must be a non-negative integer"

    return True, ""


def handler(event, context):
    """POST /products: create a product and its stock row atomically.

    Returns 201 with the created product, 400 for invalid input or a
    cancelled transaction, 500 for unexpected errors.
    """
    print(f"Received event: {event}")
    if context is not None:  # context is None in local unit tests
        print(f"Context: RequestId: {context.aws_request_id}")

    # Parse and validate before touching AWS, so malformed input can never
    # reach the DynamoDB error path.
    body = event.get('body', '{}')
    if isinstance(body, str):
        try:
            body = json.loads(body)
        except json.JSONDecodeError as e:
            print(f"JSON decode error: {str(e)}")
            return create_response(400, {'message': 'Invalid JSON in request body'})

    print(f"Validating product data: {body}")
    is_valid, error_message = validate_product_data(body)
    if not is_valid:
        print(f"Validation failed: {error_message}")
        return create_response(400, {'message': error_message})

    # Imported lazily so the validation paths above work without boto3
    # installed (e.g. in unit tests); the Lambda runtime always has it.
    import boto3
    from botocore.exceptions import ClientError

    try:
        product_id = str(uuid.uuid4())

        product_data = {
            'id': product_id,
            'title': body['title'],
            'description': body['description'],
            # DynamoDB rejects floats; store the price as Decimal.
            'price': Decimal(str(body['price']))
        }
        stock_data = {
            'product_id': product_id,
            'count': body['count']
        }

        dynamodb = boto3.resource('dynamodb', region_name=os.environ['REGION'])

        # Both writes succeed or fail together; the condition expressions
        # guard against an (unlikely) UUID collision.
        transaction_items = [
            {
                'Put': {
                    'TableName': os.environ['PRODUCTS_TABLE_NAME'],
                    'Item': product_data,
                    'ConditionExpression': 'attribute_not_exists(id)'
                }
            },
            {
                'Put': {
                    'TableName': os.environ['STOCKS_TABLE_NAME'],
                    'Item': stock_data,
                    'ConditionExpression': 'attribute_not_exists(product_id)'
                }
            }
        ]

        print(f"Executing transaction for product: {product_id}")
        dynamodb.meta.client.transact_write_items(TransactItems=transaction_items)

        response_data = {**product_data, 'count': stock_data['count']}
        print(f"Successfully created product with ID: {product_id}")
        return create_response(201, {
            'message': 'Product created successfully',
            'product': response_data
        })

    # BUG FIX: the original wrote ``except dynamodb.meta.client.exceptions.
    # TransactionCanceledException`` — that expression is evaluated when an
    # exception is raised, so any failure before ``dynamodb`` was assigned
    # (e.g. a JSON decode error) crashed the except clause with NameError.
    except ClientError as e:
        if e.response.get('Error', {}).get('Code') == 'TransactionCanceledException':
            print(f"Transaction cancelled: {str(e)}")
            return create_response(400, {'message': 'Failed to create product - transaction cancelled'})
        print(f"Error: {str(e)}")
        return create_response(500, {'message': f'Internal server error: {str(e)}'})
    except Exception as e:
        print(f"Error: {str(e)}")
        return create_response(500, {'message': f'Internal server error: {str(e)}'})
"""Get product from DynamoDB by ID""" + table = dynamodb.Table(os.environ['PRODUCTS_TABLE_NAME']) + response = table.get_item( + Key={'id': product_id} + ) + return response.get('Item') + +def get_stock_by_product_id(dynamodb, product_id: str) -> Optional[Dict[str, Any]]: + """Get stock information from DynamoDB by product ID""" + table = dynamodb.Table(os.environ['STOCKS_TABLE_NAME']) + response = table.get_item( + Key={'product_id': product_id} + ) + return response.get('Item') + +def create_response(status_code: int, body: Any) -> Dict[str, Any]: + """Create API Gateway response""" + return { + 'statusCode': status_code, + 'headers': { + 'Access-Control-Allow-Origin': '*', + 'Content-Type': 'application/json' + }, + 'body': json.dumps(body, cls=DecimalEncoder) + } + +def handler(event, context): + try: + # Log the incoming event + print(f"Event: {json.dumps(event)}") + + # Get product ID from path parameters + product_id = event.get('pathParameters', {}).get('productId') + + if not product_id: + return create_response(400, {'message': 'Product ID is required'}) + + # Initialize DynamoDB client + dynamodb = boto3.resource('dynamodb', region_name=os.environ['REGION']) + + # Get product details + product = get_product_by_id(dynamodb, product_id) + + if not product: + return create_response(404, {'message': 'Product not found'}) + + # Get stock information + stock = get_stock_by_product_id(dynamodb, product_id) + + # Add stock count to product + product['count'] = stock['count'] if stock else 0 + + # Return successful response + return create_response(200, product) + + except Exception as e: + print(f"Error: {str(e)}") # Log the error + return create_response(500, {'message': f'Internal server error: {str(e)}'}) diff --git a/Product_Service2/product_service/lambda_func/product_list.py b/Product_Service2/product_service/lambda_func/product_list.py new file mode 100644 index 0000000..5e58de8 --- /dev/null +++ 
class DecimalEncoder(json.JSONEncoder):
    """json.JSONEncoder that serialises DynamoDB Decimal values as floats."""

    def default(self, obj):
        return float(obj) if isinstance(obj, Decimal) else super().default(obj)


def get_products(dynamodb) -> List[Dict[str, Any]]:
    """Scan and return every item in the products table."""
    products_table = dynamodb.Table(os.environ['PRODUCTS_TABLE_NAME'])
    return products_table.scan().get('Items', [])


def get_stocks(dynamodb) -> List[Dict[str, Any]]:
    """Scan and return every item in the stocks table."""
    stocks_table = dynamodb.Table(os.environ['STOCKS_TABLE_NAME'])
    return stocks_table.scan().get('Items', [])


def join_products_with_stocks(products: List[Dict], stocks: List[Dict]) -> List[Dict]:
    """Annotate each product (in place) with its stock count.

    Products without a matching stock row receive a count of 0; the same
    list object that was passed in is returned.
    """
    count_by_product_id = {entry['product_id']: entry['count'] for entry in stocks}
    for item in products:
        item['count'] = count_by_product_id.get(item['id'], 0)
    return products


def create_response(status_code: int, body: Any) -> Dict[str, Any]:
    """Build an API Gateway proxy response with permissive CORS headers."""
    headers = {
        'Access-Control-Allow-Origin': '*',
        'Content-Type': 'application/json',
    }
    return {
        'statusCode': status_code,
        'headers': headers,
        'body': json.dumps(body, cls=DecimalEncoder),
    }
class LambdaStack(Stack):
    """Stack wiring the product-service Lambda functions to DynamoDB.

    Exposes the three functions via read-only properties so the API Gateway
    stack can attach integrations.
    """

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # Reference pre-existing DynamoDB tables (not managed by this stack).
        products_table = dynamodb.Table.from_table_name(
            self, 'ProductsTable', table_name='products'
        )
        stocks_table = dynamodb.Table.from_table_name(
            self, 'StocksTable', table_name='stocks'
        )
        # NOTE: the original print()ed table_name here, but at synth time
        # these are unresolved CDK tokens, so the output was meaningless
        # noise; the debug prints are removed.

        # Explicit execution role shared by the functions.
        lambda_role = iam.Role(
            self, 'ProductsLambdaRole',
            assumed_by=iam.ServicePrincipal('lambda.amazonaws.com')
        )

        # CloudWatch Logs permissions.
        lambda_role.add_managed_policy(
            iam.ManagedPolicy.from_aws_managed_policy_name(
                'service-role/AWSLambdaBasicExecutionRole'
            )
        )

        # BUG FIX: the original policy granted read actions only, although
        # the same role is handed to the create-product function, which
        # writes via a transaction. PutItem/ConditionCheckItem cover the
        # per-item permissions TransactWriteItems requires.
        lambda_role.add_to_policy(
            iam.PolicyStatement(
                actions=[
                    'dynamodb:Scan',
                    'dynamodb:GetItem',
                    'dynamodb:Query',
                    'dynamodb:PutItem',
                    'dynamodb:ConditionCheckItem',
                ],
                resources=[
                    products_table.table_arn,
                    stocks_table.table_arn
                ]
            )
        )

        # Environment shared by all three functions.
        common_env = {
            'PRODUCTS_TABLE_NAME': products_table.table_name,
            'STOCKS_TABLE_NAME': stocks_table.table_name,
            'REGION': Stack.of(self).region,
        }

        self.product_lambda = create_product_lambda(
            self,
            'CreateProductFunction',
            environment=common_env,
            role=lambda_role
        )
        self.list_products_lambda = create_list_products_lambda(
            self,
            'GetProductsFunction',
            environment=common_env,
            role=lambda_role
        )
        self.get_product_lambda = create_get_product_lambda(
            self,
            'GetProductByIdFunction',
            environment=common_env,
            role=lambda_role
        )

        # Grants target each function's effective role, so permissions hold
        # even if a factory function ignores ``role`` and lets CDK create a
        # default execution role.
        products_table.grant_read_data(self.list_products_lambda)
        stocks_table.grant_read_data(self.list_products_lambda)
        products_table.grant_read_data(self.get_product_lambda)
        stocks_table.grant_read_data(self.get_product_lambda)
        products_table.grant_write_data(self.product_lambda)
        stocks_table.grant_write_data(self.product_lambda)

    @property
    def list_products_function(self):
        """Lambda backing GET /products."""
        return self.list_products_lambda

    @property
    def get_product_function(self):
        """Lambda backing GET /products/{productId}."""
        return self.get_product_lambda

    @property
    def product_function(self):
        """Lambda backing POST /products."""
        return self.product_lambda
PRODUCTS_TABLE = os.getenv('PRODUCTS_TABLE_NAME', 'products')
STOCKS_TABLE = os.getenv('STOCKS_TABLE_NAME', 'stocks')


def get_dynamodb_resource():
    """Initialise the DynamoDB service resource for the configured region."""
    # Imported lazily so the pure helpers below are testable without the SDK.
    import boto3
    return boto3.resource('dynamodb', region_name=REGION)


def verify_tables(tables: List[str], dynamodb) -> bool:
    """Check that every required table exists.

    Args:
        tables: Required table names.
        dynamodb: boto3 DynamoDB service resource (anything exposing
            ``.tables.all()`` yielding objects with a ``name`` attribute).

    Returns:
        True when all tables exist. Otherwise prints every missing table
        (the original stopped at the first one) and returns False.
    """
    existing = {table.name for table in dynamodb.tables.all()}
    missing = [name for name in tables if name not in existing]
    for name in missing:
        print(f"āŒ Table {name} does not exist")
    return not missing


def create_products_with_transactions(dynamodb, products):
    """Insert each product plus a stock row of 10 in one transaction.

    Raises:
        botocore ClientError: re-raised after logging cancellation reasons
            when the transaction is cancelled.
    """
    # Local import keeps the happy path usable in tests without DynamoDB.
    from decimal import Decimal
    try:
        for product in products:
            product_id = str(uuid.uuid4())

            transaction_items = [
                {
                    'Put': {
                        'TableName': PRODUCTS_TABLE,
                        'Item': {
                            'id': product_id,
                            'title': product['title'],
                            'description': product['description'],
                            # BUG FIX: DynamoDB rejects float values; store
                            # the price as Decimal (via str, to avoid float
                            # representation noise).
                            'price': Decimal(str(product['price']))
                        }
                    }
                },
                {
                    'Put': {
                        'TableName': STOCKS_TABLE,
                        'Item': {
                            'product_id': product_id,
                            'count': 10
                        }
                    }
                }
            ]

            # Execute transaction: both rows land or neither does.
            dynamodb.meta.client.transact_write_items(
                TransactItems=transaction_items
            )
            print(f"āœ… Created product and stock for {product['title']}")

    except ClientError as e:
        if e.response['Error']['Code'] == 'TransactionCanceledException':
            print("āŒ Transaction cancelled:", e.response['CancellationReasons'])
        raise
def create_product_lambda(
        scope: Construct,
        id: str,
        environment: dict = None,
        role=None) -> _lambda.Function:
    """Create the createProduct Lambda function.

    Args:
        scope: CDK construct scope.
        id: Construct ID.
        environment: Environment variables for the function
            (PRODUCTS_TABLE_NAME, STOCKS_TABLE_NAME, REGION).
        role: Optional IAM execution role; when None, CDK creates a
            default one.

    Returns:
        _lambda.Function: The created Lambda function.
    """
    # BUG FIX: ``role: None`` was a type annotation rather than a default
    # value, and the argument was never forwarded — the caller's explicit
    # role was silently ignored. The docstring also documented nonexistent
    # parameters (products_table_name/stocks_table_name); both are fixed.
    return _lambda.Function(
        scope,
        id,
        runtime=_lambda.Runtime.PYTHON_3_9,
        handler='create_product.handler',
        code=_lambda.Code.from_asset('product_service/lambda_func'),
        environment=environment or {},
        role=role,
    )
b/Product_Service2/requirements-dev.txt @@ -0,0 +1 @@ +pytest==6.2.5 diff --git a/requirements.txt b/Product_Service2/requirements.txt similarity index 61% rename from requirements.txt rename to Product_Service2/requirements.txt index 4de807b..c7f5618 100644 --- a/requirements.txt +++ b/Product_Service2/requirements.txt @@ -1,7 +1,8 @@ -aws-cdk-lib==2.178.1 +aws-cdk-lib~=2.0.0 constructs>=10.0.0,<11.0.0 pytest>=6.0 pytest-cov # aws-cdk.aws-lambda # aws-cdk.aws-apigateway -# boto3 \ No newline at end of file +aws-lambda-powertools>=2.0.0 +boto3>=1.26.0 \ No newline at end of file diff --git a/Product_Service2/source.bat b/Product_Service2/source.bat new file mode 100644 index 0000000..9e1a834 --- /dev/null +++ b/Product_Service2/source.bat @@ -0,0 +1,13 @@ +@echo off + +rem The sole purpose of this script is to make the command +rem +rem source .venv/bin/activate +rem +rem (which activates a Python virtualenv on Linux or Mac OS X) work on Windows. +rem On Windows, this command just runs this batch file (the argument is ignored). +rem +rem Now we don't need to document a Windows command for activating a virtualenv. 
+ +echo Executing .venv\Scripts\activate.bat for you +.venv\Scripts\activate.bat diff --git a/Product_Service2/tests/__init__.py b/Product_Service2/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/Product_Service2/tests/conftest.py similarity index 100% rename from tests/conftest.py rename to Product_Service2/tests/conftest.py diff --git a/Product_Service2/tests/unit/__init__.py b/Product_Service2/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/lambda_test/test_product_functions.py b/Product_Service2/tests/unit/lambda_test/test_product_functions.py similarity index 100% rename from tests/unit/lambda_test/test_product_functions.py rename to Product_Service2/tests/unit/lambda_test/test_product_functions.py diff --git a/tests/unit/test_product_servie_stack.py b/Product_Service2/tests/unit/test_product_servie_stack.py similarity index 100% rename from tests/unit/test_product_servie_stack.py rename to Product_Service2/tests/unit/test_product_servie_stack.py diff --git a/product_service/api_gateway_stack.py b/product_service/api_gateway_stack.py deleted file mode 100644 index 1d813ca..0000000 --- a/product_service/api_gateway_stack.py +++ /dev/null @@ -1,77 +0,0 @@ -from aws_cdk import ( - Stack, - aws_apigateway as apigw, - CfnOutput -) -from constructs import Construct - -class ApiGatewayStack(Stack): - def __init__(self, scope: Construct, construct_id: str, lambda_stack, **kwargs) -> None: - super().__init__(scope, construct_id, **kwargs) - - # Create API Gateway with CORS - api = apigw.RestApi( - self, 'ProductsApi', - rest_api_name='Products Service', - default_cors_preflight_options=apigw.CorsOptions( - allow_origins=apigw.Cors.ALL_ORIGINS, - allow_methods=apigw.Cors.ALL_METHODS, - allow_headers=[ - 'Content-Type', - 'X-Amz-Date', - 'Authorization', - 'X-Api-Key', - 'X-Amz-Security-Token', - ], - ) - ) - - # Create API resources and methods - products = 
api.root.add_resource('products') - products.add_method( - 'GET', - apigw.LambdaIntegration( - lambda_stack.list_products_function, - proxy=True, - integration_responses=[{ - 'statusCode': '200', - 'responseParameters': { - 'method.response.header.Access-Control-Allow-Origin': "'*'" - } - }] - ), - method_responses=[{ - 'statusCode': '200', - 'responseParameters': { - 'method.response.header.Access-Control-Allow-Origin': True - } - }] - ) - - product = products.add_resource('{productId}') - product.add_method( - 'GET', - apigw.LambdaIntegration( - lambda_stack.get_product_function, - proxy=True, - integration_responses=[{ - 'statusCode': '200', - 'responseParameters': { - 'method.response.header.Access-Control-Allow-Origin': "'*'" - } - }] - ), - method_responses=[{ - 'statusCode': '200', - 'responseParameters': { - 'method.response.header.Access-Control-Allow-Origin': True - } - }] - ) - - # Output the API URL - CfnOutput( - self, "APIGatewayURL", - value=f"{api.url}products", - description="API Gateway endpoint URL" - ) diff --git a/product_service/lambda_func/product_by_id.py b/product_service/lambda_func/product_by_id.py deleted file mode 100644 index 0a545c7..0000000 --- a/product_service/lambda_func/product_by_id.py +++ /dev/null @@ -1,30 +0,0 @@ -from product_service.lambda_func.products_mock import products -import json - -def handler(event, context): - product_id = event['pathParameters']['productId'] - - product = next( - (item for item in products if item["id"] == product_id), - None - ) - - headers = { - 'Content-Type': 'application/json', - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', - 'Access-Control-Allow-Methods': 'OPTIONS,GET' - } - - if not product: - return { - 'statusCode': 404, - 'headers': headers, - 'body': json.dumps({'message': 'Product not found'}) - } - - return { - 'statusCode': 200, - 'headers': headers, - 'body': json.dumps(product) - } 
diff --git a/product_service/lambda_func/product_list.py b/product_service/lambda_func/product_list.py deleted file mode 100644 index f796c46..0000000 --- a/product_service/lambda_func/product_list.py +++ /dev/null @@ -1,14 +0,0 @@ -from product_service.lambda_func.products_mock import products -import json - -def handler(event, context): - return { - 'statusCode': 200, - 'headers': { - 'Content-Type': 'application/json', - 'Access-Control-Allow-Origin': '*', - 'Access-Control-Allow-Headers': 'Content-Type,X-Amz-Date,Authorization,X-Api-Key,X-Amz-Security-Token', - 'Access-Control-Allow-Methods': 'OPTIONS,GET' - }, - 'body': json.dumps(products) - } diff --git a/product_service/lambda_stack.py b/product_service/lambda_stack.py deleted file mode 100644 index c07311c..0000000 --- a/product_service/lambda_stack.py +++ /dev/null @@ -1,26 +0,0 @@ -from aws_cdk import Stack -from constructs import Construct -from product_service.get_products import create_list_products_lambda -from product_service.get_product_by_id import create_get_product_lambda - -class LambdaStack(Stack): - def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None: - super().__init__(scope, construct_id, **kwargs) - - self.list_products_lambda = create_list_products_lambda( - self, - 'GetProductsFunction' - ) - - self.get_product_lambda = create_get_product_lambda( - self, - 'GetProductByIdFunction' - ) - - @property - def list_products_function(self): - return self.list_products_lambda - - @property - def get_product_function(self): - return self.get_product_lambda