Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Binary file modified .DS_Store
Binary file not shown.
File renamed without changes.
58 changes: 58 additions & 0 deletions Import Service/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@

# Welcome to your CDK Python project!

This is a blank project for CDK development with Python.

The `cdk.json` file tells the CDK Toolkit how to execute your app.

This project is set up like a standard Python project. The initialization
process also creates a virtualenv within this project, stored under the `.venv`
directory. To create the virtualenv it assumes that there is a `python3`
(or `python` for Windows) executable in your path with access to the `venv`
package. If for any reason the automatic creation of the virtualenv fails,
you can create the virtualenv manually.

To manually create a virtualenv on MacOS and Linux:

```
$ python3 -m venv .venv
```

After the init process completes and the virtualenv is created, you can use the following
step to activate your virtualenv.

```
$ source .venv/bin/activate
```

If you are on a Windows platform, you would activate the virtualenv like this:

```
% .venv\Scripts\activate.bat
```

Once the virtualenv is activated, you can install the required dependencies.

```
$ pip install -r requirements.txt
```

At this point you can now synthesize the CloudFormation template for this code.

```
$ cdk synth
```

To add additional dependencies, for example other CDK libraries, just add
them to your `requirements.txt` file and rerun the `pip install -r requirements.txt`
command.

## Useful commands

* `cdk ls` list all stacks in the app
* `cdk synth` emits the synthesized CloudFormation template
* `cdk deploy` deploy this stack to your default AWS account/region
* `cdk diff` compare deployed stack with current state
* `cdk docs` open CDK documentation

Enjoy!
14 changes: 14 additions & 0 deletions Import Service/app.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
#!/usr/bin/env python3
"""CDK application entry point: instantiates and synthesizes the Import Service stack."""
import os
from aws_cdk import App
from import_service.import_service_stack import ImportServiceStack

# Target account/region are supplied by the CDK CLI at synth/deploy time.
deployment_env = {
    'account': os.getenv('CDK_DEFAULT_ACCOUNT'),
    'region': os.getenv('CDK_DEFAULT_REGION'),
}

app = App()
ImportServiceStack(app, "ImportServiceStack", env=deployment_env)

app.synth()
86 changes: 86 additions & 0 deletions Import Service/cdk.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,86 @@
{
"app": "python3 app.py",
"watch": {
"include": [
"**"
],
"exclude": [
"README.md",
"cdk*.json",
"requirements*.txt",
"source.bat",
"**/__init__.py",
"**/__pycache__",
"tests"
]
},
"context": {
"@aws-cdk/aws-lambda:recognizeLayerVersion": true,
"@aws-cdk/core:checkSecretUsage": true,
"@aws-cdk/core:target-partitions": [
"aws",
"aws-cn"
],
"@aws-cdk-containers/ecs-service-extensions:enableDefaultLogDriver": true,
"@aws-cdk/aws-ec2:uniqueImdsv2TemplateName": true,
"@aws-cdk/aws-ecs:arnFormatIncludesClusterName": true,
"@aws-cdk/aws-iam:minimizePolicies": true,
"@aws-cdk/core:validateSnapshotRemovalPolicy": true,
"@aws-cdk/aws-codepipeline:crossAccountKeyAliasStackSafeResourceName": true,
"@aws-cdk/aws-s3:createDefaultLoggingPolicy": true,
"@aws-cdk/aws-sns-subscriptions:restrictSqsDescryption": true,
"@aws-cdk/aws-apigateway:disableCloudWatchRole": true,
"@aws-cdk/core:enablePartitionLiterals": true,
"@aws-cdk/aws-events:eventsTargetQueueSameAccount": true,
"@aws-cdk/aws-ecs:disableExplicitDeploymentControllerForCircuitBreaker": true,
"@aws-cdk/aws-iam:importedRoleStackSafeDefaultPolicyName": true,
"@aws-cdk/aws-s3:serverAccessLogsUseBucketPolicy": true,
"@aws-cdk/aws-route53-patters:useCertificate": true,
"@aws-cdk/customresources:installLatestAwsSdkDefault": false,
"@aws-cdk/aws-rds:databaseProxyUniqueResourceName": true,
"@aws-cdk/aws-codedeploy:removeAlarmsFromDeploymentGroup": true,
"@aws-cdk/aws-apigateway:authorizerChangeDeploymentLogicalId": true,
"@aws-cdk/aws-ec2:launchTemplateDefaultUserData": true,
"@aws-cdk/aws-secretsmanager:useAttachedSecretResourcePolicyForSecretTargetAttachments": true,
"@aws-cdk/aws-redshift:columnId": true,
"@aws-cdk/aws-stepfunctions-tasks:enableEmrServicePolicyV2": true,
"@aws-cdk/aws-ec2:restrictDefaultSecurityGroup": true,
"@aws-cdk/aws-apigateway:requestValidatorUniqueId": true,
"@aws-cdk/aws-kms:aliasNameRef": true,
"@aws-cdk/aws-autoscaling:generateLaunchTemplateInsteadOfLaunchConfig": true,
"@aws-cdk/core:includePrefixInUniqueNameGeneration": true,
"@aws-cdk/aws-efs:denyAnonymousAccess": true,
"@aws-cdk/aws-opensearchservice:enableOpensearchMultiAzWithStandby": true,
"@aws-cdk/aws-lambda-nodejs:useLatestRuntimeVersion": true,
"@aws-cdk/aws-efs:mountTargetOrderInsensitiveLogicalId": true,
"@aws-cdk/aws-rds:auroraClusterChangeScopeOfInstanceParameterGroupWithEachParameters": true,
"@aws-cdk/aws-appsync:useArnForSourceApiAssociationIdentifier": true,
"@aws-cdk/aws-rds:preventRenderingDeprecatedCredentials": true,
"@aws-cdk/aws-codepipeline-actions:useNewDefaultBranchForCodeCommitSource": true,
"@aws-cdk/aws-cloudwatch-actions:changeLambdaPermissionLogicalIdForLambdaAction": true,
"@aws-cdk/aws-codepipeline:crossAccountKeysDefaultValueToFalse": true,
"@aws-cdk/aws-codepipeline:defaultPipelineTypeToV2": true,
"@aws-cdk/aws-kms:reduceCrossAccountRegionPolicyScope": true,
"@aws-cdk/aws-eks:nodegroupNameAttribute": true,
"@aws-cdk/aws-ec2:ebsDefaultGp3Volume": true,
"@aws-cdk/aws-ecs:removeDefaultDeploymentAlarm": true,
"@aws-cdk/custom-resources:logApiResponseDataPropertyTrueDefault": false,
"@aws-cdk/aws-s3:keepNotificationInImportedBucket": false,
"@aws-cdk/aws-ecs:enableImdsBlockingDeprecatedFeature": false,
"@aws-cdk/aws-ecs:disableEcsImdsBlocking": true,
"@aws-cdk/aws-ecs:reduceEc2FargateCloudWatchPermissions": true,
"@aws-cdk/aws-dynamodb:resourcePolicyPerReplica": true,
"@aws-cdk/aws-ec2:ec2SumTImeoutEnabled": true,
"@aws-cdk/aws-appsync:appSyncGraphQLAPIScopeLambdaPermission": true,
"@aws-cdk/aws-rds:setCorrectValueForDatabaseInstanceReadReplicaInstanceResourceId": true,
"@aws-cdk/core:cfnIncludeRejectComplexResourceUpdateCreatePolicyIntrinsics": true,
"@aws-cdk/aws-lambda-nodejs:sdkV3ExcludeSmithyPackages": true,
"@aws-cdk/aws-stepfunctions-tasks:fixRunEcsTaskPolicy": true,
"@aws-cdk/aws-ec2:bastionHostUseAmazonLinux2023ByDefault": true,
"@aws-cdk/aws-route53-targets:userPoolDomainNameMethodWithoutCustomResource": true,
"@aws-cdk/aws-elasticloadbalancingV2:albDualstackWithoutPublicIpv4SecurityGroupRulesDefault": true,
"@aws-cdk/aws-iam:oidcRejectUnauthorizedConnections": true,
"@aws-cdk/core:enableAdditionalMetadataCollection": true,
"@aws-cdk/aws-lambda:createNewPoliciesWithAddToRolePolicy": true
}
}
File renamed without changes.
44 changes: 44 additions & 0 deletions Import Service/import_service/api_gateway.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
from aws_cdk import (
aws_apigateway as apigateway,
aws_lambda as _lambda,
CfnOutput
)
from constructs import Construct

def create_api_gateway(scope: Construct, import_products_lambda: _lambda.Function) -> apigateway.RestApi:
    """Create the Import Service REST API with a validated GET /import endpoint.

    Args:
        scope: Construct scope (typically the stack) that owns the API.
        import_products_lambda: Lambda integrated behind GET /import.

    Returns:
        The created RestApi construct; its URL is also exported via CfnOutput.
    """
    # CORS preflight applies to every resource on the API by default.
    cors_options = apigateway.CorsOptions(
        allow_origins=apigateway.Cors.ALL_ORIGINS,
        allow_methods=apigateway.Cors.ALL_METHODS,
        allow_headers=['Content-Type', 'X-Amz-Date',
                       'Authorization', 'X-Api-Key'],
        allow_credentials=True,
    )

    rest_api = apigateway.RestApi(
        scope, 'ImportApi',
        rest_api_name='Import Service API',
        description='API Gateway for Import Service',
        default_cors_preflight_options=cors_options,
    )

    # GET /import?name=<file>; the 'name' query parameter is required and
    # enforced by API Gateway request validation before the lambda is invoked.
    rest_api.root.add_resource('import').add_method(
        'GET',
        apigateway.LambdaIntegration(import_products_lambda),
        request_parameters={
            'method.request.querystring.name': True
        },
        request_validator_options=apigateway.RequestValidatorOptions(
            validate_request_parameters=True
        ),
    )

    # Surface the invoke URL in stack outputs for clients and CI.
    CfnOutput(
        scope, 'ApiUrl',
        value=rest_api.url,
        description='API Gateway URL',
    )

    return rest_api
39 changes: 39 additions & 0 deletions Import Service/import_service/import_products_lambda.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
from aws_cdk import (
aws_lambda as _lambda,
aws_iam as iam,
aws_s3 as s3,
Duration
)
from constructs import Construct

def create_import_products_lambda(
    scope: Construct,
    import_bucket: s3.IBucket
) -> _lambda.Function:
    """Create the importProductsFile lambda that presigns S3 upload URLs.

    Args:
        scope: Construct scope (typically the stack) that owns the function.
        import_bucket: Bucket the lambda reads from and presigns uploads into.

    Returns:
        The configured Lambda function with S3 permissions attached.
    """
    handler_fn = _lambda.Function(
        scope, 'ImportProductsFile',
        runtime=_lambda.Runtime.PYTHON_3_9,
        handler='import_product_file.lambda_handler',
        code=_lambda.Code.from_asset('import_service/lambda_func/'),
        environment={'BUCKET_NAME': import_bucket.bucket_name},
        timeout=Duration.seconds(30),
        memory_size=128,
    )

    # Bucket-level grants for reading and writing objects.
    import_bucket.grant_read_write(handler_fn)
    import_bucket.grant_put(handler_fn)

    # Explicit PutObject statement over all keys, added for presigned-URL
    # uploads. NOTE(review): this presumably overlaps with the grants above —
    # confirm whether all three are needed before removing any.
    handler_fn.add_to_role_policy(
        iam.PolicyStatement(
            actions=['s3:PutObject'],
            resources=[f'{import_bucket.bucket_arn}/*']
        )
    )

    return handler_fn
37 changes: 37 additions & 0 deletions Import Service/import_service/import_service_stack.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from aws_cdk import (
Stack,
aws_s3 as s3,
)
from constructs import Construct
from .api_gateway import create_api_gateway
from .import_products_lambda import create_import_products_lambda
from .parse_products_lambda import create_parse_products_lambda

class ImportServiceStack(Stack):
    """Stack wiring the Import Service: bucket reference, lambdas, and API."""

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        """Assemble all Import Service resources.

        Args:
            scope: Parent construct (the CDK App).
            construct_id: Logical id of this stack.
            **kwargs: Forwarded to Stack (e.g. env).
        """
        super().__init__(scope, construct_id, **kwargs)

        # Reference existing S3 bucket (not created/managed by this stack).
        import_bucket = s3.Bucket.from_bucket_name(
            self, 'ImportBucket',
            bucket_name='myimportservicebucket'
        )

        # Lambda that presigns upload URLs; needed below for the API integration.
        import_products_lambda = create_import_products_lambda(
            self,
            import_bucket
        )

        # Lambda that parses uploaded CSVs; registered in the construct tree
        # for its side effects, so the return value is intentionally unused.
        create_parse_products_lambda(
            self,
            import_bucket
        )

        # API Gateway fronting the import lambda; construct tree holds the
        # reference, so no local binding is kept.
        create_api_gateway(
            self,
            import_products_lambda
        )
56 changes: 56 additions & 0 deletions Import Service/import_service/lambda_func/import_file_parser.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
import csv
import json
import os
from io import StringIO
from urllib.parse import unquote_plus

import boto3

def lambda_handler(event, context):
    """Parse an uploaded CSV from S3, log each row, then archive the file.

    Triggered by an S3 ObjectCreated event. Reads the object, logs every CSV
    row, copies the object from uploaded/ to parsed/ and deletes the original.

    Args:
        event: S3 event notification payload.
        context: Lambda context (unused).

    Returns:
        dict with statusCode 200 on success, 500 on any failure.
    """
    try:
        s3_client = boto3.client('s3')

        # Get bucket and file details from the S3 event.
        s3_record = event['Records'][0]['s3']
        bucket = s3_record['bucket']['name']
        # S3 event keys are URL-encoded (spaces arrive as '+'); decode before
        # using the key in API calls, otherwise such objects 404.
        key = unquote_plus(s3_record['object']['key'])

        print(f"Processing file: {key} from bucket: {bucket}")

        # Get the object from S3 and decode its content as UTF-8 text.
        response = s3_client.get_object(Bucket=bucket, Key=key)
        file_content = response['Body'].read().decode('utf-8')

        # Parse CSV and log each record.
        csv_reader = csv.DictReader(StringIO(file_content))
        for row in csv_reader:
            print(f"Parsed record: {json.dumps(row)}")

        # Move file to parsed folder (copy, then delete the original).
        new_key = key.replace('uploaded/', 'parsed/')
        s3_client.copy_object(
            Bucket=bucket,
            CopySource={'Bucket': bucket, 'Key': key},
            Key=new_key
        )
        s3_client.delete_object(Bucket=bucket, Key=key)

        return {
            'statusCode': 200,
            'body': json.dumps('CSV processing completed successfully')
        }

    except Exception as e:
        print(f"Error processing file: {str(e)}")
        return {
            'statusCode': 500,
            'body': json.dumps(f'Error processing file: {str(e)}')
        }
54 changes: 54 additions & 0 deletions Import Service/import_service/lambda_func/import_product_file.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
import json
import os
import boto3
from botocore.config import Config
from urllib.parse import unquote

def lambda_handler(event, context):
    """Return a presigned S3 PUT URL for uploading a CSV file.

    Expects a 'name' query-string parameter with the file name. Responds 200
    with the signed URL in the body, 400 when the parameter is missing, and
    500 on unexpected errors.

    Args:
        event: API Gateway proxy event.
        context: Lambda context (unused).

    Returns:
        API Gateway proxy response dict (statusCode, headers, body).
    """
    # Shared CORS headers so every response (success or error) is consistent.
    cors_headers = {
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Credentials': True,
        'Access-Control-Allow-Methods' : 'OPTIONS,GET,POST,PUT,DELETE',
    }

    try:
        # queryStringParameters is None when no query string is sent at all,
        # so guard before indexing; a missing 'name' is a client error (400),
        # not a server fault (500).
        params = event.get('queryStringParameters') or {}
        file_name = params.get('name')
        if not file_name:
            return {
                'statusCode': 400,
                'headers': cors_headers,
                'body': json.dumps({'error': 'Query parameter "name" is required'})
            }

        # Decode the filename in case it's URL encoded
        file_name = unquote(file_name)

        # SigV4 signing is required for presigned URLs in modern regions.
        config = Config(
            signature_version='v4',
            region_name=os.environ['AWS_REGION']
        )
        s3_client = boto3.client('s3', config=config)

        # Signed URL targets the uploaded/ prefix in the import bucket.
        bucket_name = os.environ['BUCKET_NAME']
        key = f'uploaded/{file_name}'

        signed_url = s3_client.generate_presigned_url(
            'put_object',
            Params={
                'Bucket': bucket_name,
                'Key': key,
                'ContentType': 'text/csv'
            },
            ExpiresIn=3600  # URL expires in 1 hour
        )

        return {
            'statusCode': 200,
            'headers': cors_headers,
            'body': signed_url
        }

    except Exception as e:
        return {
            'statusCode': 500,
            'headers': cors_headers,
            'body': json.dumps({'error': str(e)})
        }
Loading