From 0975e0a7a761344f3365a9dcd3af33a57b1c460e Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 16 Oct 2025 19:14:13 +0530 Subject: [PATCH 01/35] Install zappa and initial example configuration --- .gitignore | 3 + backend/poetry.lock | 303 +++++++++++++++++++++++++++- backend/pyproject.toml | 1 + backend/zappa_settings.example.json | 34 ++++ 4 files changed, 338 insertions(+), 3 deletions(-) create mode 100644 backend/zappa_settings.example.json diff --git a/.gitignore b/.gitignore index 1379e1b8cd..e089996112 100644 --- a/.gitignore +++ b/.gitignore @@ -24,8 +24,11 @@ __pycache__ *.log *.pdf *.pem +backend/*nest-backend-staging*.zip +backend/*nest-backend-staging*.tar.gz backend/data/backup* backend/staticfiles +backend/zappa_settings.json frontend/blob-report/ frontend/coverage frontend/dist diff --git a/backend/poetry.lock b/backend/poetry.lock index 322cc36ac7..5e75824228 100644 --- a/backend/poetry.lock +++ b/backend/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand. 
[[package]] name = "aiohappyeyeballs" @@ -235,6 +235,21 @@ sniffio = ">=1.1" [package.extras] trio = ["trio (>=0.31.0)"] +[[package]] +name = "argcomplete" +version = "3.6.2" +description = "Bash tab completion for argparse" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591"}, + {file = "argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf"}, +] + +[package.extras] +test = ["coverage", "mypy", "pexpect", "ruff", "wheel"] + [[package]] name = "asgiref" version = "3.10.0" @@ -436,6 +451,23 @@ files = [ {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, ] +[[package]] +name = "cfn-flip" +version = "1.3.0" +description = "Convert AWS CloudFormation templates between JSON and YAML formats" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "cfn_flip-1.3.0-py3-none-any.whl", hash = "sha256:faca8e77f0d32fb84cce1db1ef4c18b14a325d31125dae73c13bcc01947d2722"}, + {file = "cfn_flip-1.3.0.tar.gz", hash = "sha256:003e02a089c35e1230ffd0e1bcfbbc4b12cc7d2deb2fcc6c4228ac9819307362"}, +] + +[package.dependencies] +Click = "*" +PyYAML = ">=4.1" +six = "*" + [[package]] name = "charset-normalizer" version = "3.4.3" @@ -531,7 +563,7 @@ version = "8.3.0" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.10" -groups = ["dev"] +groups = ["main", "dev"] files = [ {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"}, {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"}, @@ -971,6 +1003,18 @@ pyyaml = ">=6" regex = ">=2023" tqdm = ">=4.62.2" +[[package]] +name = "durationpy" +version = "0.10" +description = "Module for 
converting between datetime.timedelta and Go's Duration strings." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "durationpy-0.10-py3-none-any.whl", hash = "sha256:3b41e1b601234296b4fb368338fdcd3e13e0b4fb5b67345948f4f2bf9868b286"}, + {file = "durationpy-0.10.tar.gz", hash = "sha256:1fa6893409a6e739c9c72334fc65cca1f355dbdd93405d30f726deb5bde42fba"}, +] + [[package]] name = "editorconfig" version = "0.17.1" @@ -1319,6 +1363,18 @@ files = [ {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, ] +[[package]] +name = "hjson" +version = "3.1.0" +description = "Hjson, a user interface for JSON." +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "hjson-3.1.0-py3-none-any.whl", hash = "sha256:65713cdcf13214fb554eb8b4ef803419733f4f5e551047c9b711098ab7186b89"}, + {file = "hjson-3.1.0.tar.gz", hash = "sha256:55af475a27cf83a7969c808399d7bccdec8fb836a07ddbd574587593b9cdcf75"}, +] + [[package]] name = "httpcore" version = "1.0.9" @@ -1648,6 +1704,24 @@ files = [ [package.dependencies] referencing = ">=0.31.0" +[[package]] +name = "kappa" +version = "0.6.0" +description = "A CLI tool for AWS Lambda developers" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "kappa-0.6.0-py2-none-any.whl", hash = "sha256:4d6b7b3accce4a0aaaac92b36237a6304f0f2fffbbe3caea3f7c9f52d12c9989"}, + {file = "kappa-0.6.0.tar.gz", hash = "sha256:4b5b372872f25d619e427e04282551048dc975a107385b076b3ffc6406a15833"}, +] + +[package.dependencies] +boto3 = ">=1.2.3" +click = ">=5.1" +placebo = ">=0.8.1" +PyYAML = ">=3.11" + [[package]] name = "langchain" version = "0.3.27" @@ -2651,6 +2725,29 @@ tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "ole typing = ["typing-extensions ; python_version < \"3.10\""] xmp = ["defusedxml"] +[[package]] +name = "pip" +version = "25.2" +description = "The PyPA recommended tool for installing Python 
packages." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pip-25.2-py3-none-any.whl", hash = "sha256:6d67a2b4e7f14d8b31b8b52648866fa717f45a1eb70e83002f4331d07e953717"}, + {file = "pip-25.2.tar.gz", hash = "sha256:578283f006390f85bb6282dffb876454593d637f5d1be494b5202ce4877e71f2"}, +] + +[[package]] +name = "placebo" +version = "0.9.0" +description = "Make boto3 calls that look real but have no effect" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "placebo-0.9.0.tar.gz", hash = "sha256:03157f8527bbc2965b71b88f4a139ef8038618b346787f20d63e3c5da541b047"}, +] + [[package]] name = "platformdirs" version = "4.5.0" @@ -3336,6 +3433,24 @@ files = [ [package.extras] cli = ["click (>=5.0)"] +[[package]] +name = "python-slugify" +version = "8.0.4" +description = "A Python slugify application that also handles Unicode" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "python-slugify-8.0.4.tar.gz", hash = "sha256:59202371d1d05b54a9e7720c5e038f928f45daaffe41dd10822f3907b937c856"}, + {file = "python_slugify-8.0.4-py2.py3-none-any.whl", hash = "sha256:276540b79961052b66b7d116620b36518847f52d5fd9e3a70164fc8c50faa6b8"}, +] + +[package.dependencies] +text-unidecode = ">=1.3" + +[package.extras] +unidecode = ["Unidecode (>=1.1.1)"] + [[package]] name = "pyyaml" version = "6.0.3" @@ -3344,6 +3459,13 @@ optional = false python-versions = ">=3.8" groups = ["main", "dev"] files = [ + {file = "PyYAML-6.0.3-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:c2514fceb77bc5e7a2f7adfaa1feb2fb311607c9cb518dbc378688ec73d8292f"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c57bb8c96f6d1808c030b1687b9b5fb476abaa47f0db9c0101f5e9f394e97f4"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:efd7b85f94a6f21e4932043973a7ba2613b059c4a000551892ac9f1d11f5baf3"}, + {file = "PyYAML-6.0.3-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:22ba7cfcad58ef3ecddc7ed1db3409af68d023b7f940da23c6c2a1890976eda6"}, + {file = "PyYAML-6.0.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6344df0d5755a2c9a276d4473ae6b90647e216ab4757f8426893b5dd2ac3f369"}, + {file = "PyYAML-6.0.3-cp38-cp38-win32.whl", hash = "sha256:3ff07ec89bae51176c0549bc4c63aa6202991da2d9a6129d7aef7f1407d3f295"}, + {file = "PyYAML-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:5cf4e27da7e3fbed4d6c3d8e797387aaad68102272f8f9752883bc32d61cb87b"}, {file = "pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b"}, {file = "pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956"}, {file = "pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8"}, @@ -4007,6 +4129,27 @@ statsig = ["statsig (>=0.55.3)"] tornado = ["tornado (>=6)"] unleash = ["UnleashClient (>=6.0.1)"] +[[package]] +name = "setuptools" +version = "79.0.1" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-79.0.1-py3-none-any.whl", hash = "sha256:e147c0549f27767ba362f9da434eab9c5dc0045d5304feb602a0af001089fc51"}, + {file = "setuptools-79.0.1.tar.gz", hash = "sha256:128ce7b8f33c3079fd1b067ecbb4051a66e8526e7b65f6cec075dfc650ddfa88"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", 
"jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + [[package]] name = "six" version = "1.17.0" @@ -4069,14 +4212,61 @@ optional = false python-versions = ">=3.7" groups = ["main"] files = [ + {file = "SQLAlchemy-2.0.44-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:471733aabb2e4848d609141a9e9d56a427c0a038f4abf65dd19d7a21fd563632"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48bf7d383a35e668b984c805470518b635d48b95a3c57cb03f37eaa3551b5f9f"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf4bb6b3d6228fcf3a71b50231199fb94d2dd2611b66d33be0578ea3e6c2726"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:e998cf7c29473bd077704cea3577d23123094311f59bdc4af551923b168332b1"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ebac3f0b5732014a126b43c2b7567f2f0e0afea7d9119a3378bde46d3dcad88e"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win32.whl", hash = "sha256:3255d821ee91bdf824795e936642bbf43a4c7cedf5d1aed8d24524e66843aa74"}, + {file = "SQLAlchemy-2.0.44-cp37-cp37m-win_amd64.whl", hash = "sha256:78e6c137ba35476adb5432103ae1534f2f5295605201d946a4198a0dea4b38e7"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7c77f3080674fc529b1bd99489378c7f63fcb4ba7f8322b79732e0258f0ea3ce"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26ef74ba842d61635b0152763d057c8d48215d5be9bb8b7604116a059e9985"}, {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4a172b31785e2f00780eccab00bc240ccdbfdb8345f1e6063175b3ff12ad1b0"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9480c0740aabd8cb29c329b422fb65358049840b34aba0adf63162371d2a96e"}, {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:17835885016b9e4d0135720160db3095dc78c583e7b902b6be799fb21035e749"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cbe4f85f50c656d753890f39468fcd8190c5f08282caf19219f684225bfd5fd2"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win32.whl", hash = "sha256:2fcc4901a86ed81dc76703f3b93ff881e08761c63263c46991081fd7f034b165"}, + {file = "sqlalchemy-2.0.44-cp310-cp310-win_amd64.whl", hash = "sha256:9919e77403a483ab81e3423151e8ffc9dd992c20d2603bf17e4a8161111e55f5"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fe3917059c7ab2ee3f35e77757062b1bea10a0b6ca633c58391e3f3c6c488dd"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:de4387a354ff230bc979b46b2207af841dc8bf29847b6c7dbe60af186d97aefa"}, {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3678a0fb72c8a6a29422b2732fe423db3ce119c34421b5f9955873eb9b62c1e"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3cf6872a23601672d61a68f390e44703442639a12ee9dd5a88bbce52a695e46e"}, {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:329aa42d1be9929603f406186630135be1e7a42569540577ba2c69952b7cf399"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:70e03833faca7166e6a9927fbee7c27e6ecde436774cd0b24bbcc96353bce06b"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win32.whl", hash = "sha256:253e2f29843fb303eca6b2fc645aca91fa7aa0aa70b38b6950da92d44ff267f3"}, + {file = "sqlalchemy-2.0.44-cp311-cp311-win_amd64.whl", hash = "sha256:7a8694107eb4308a13b425ca8c0e67112f8134c846b6e1f722698708741215d5"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:72fea91746b5890f9e5e0997f16cbf3d53550580d76355ba2d998311b17b2250"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:585c0c852a891450edbb1eaca8648408a3cc125f18cf433941fa6babcc359e29"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b94843a102efa9ac68a7a30cd46df3ff1ed9c658100d30a725d10d9c60a2f44"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:119dc41e7a7defcefc57189cfa0e61b1bf9c228211aba432b53fb71ef367fda1"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0765e318ee9179b3718c4fd7ba35c434f4dd20332fbc6857a5e8df17719c24d7"}, + {file = "sqlalchemy-2.0.44-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2e7b5b079055e02d06a4308d0481658e4f06bc7ef211567edc8f7d5dce52018d"}, {file = "sqlalchemy-2.0.44-cp312-cp312-win32.whl", 
hash = "sha256:846541e58b9a81cce7dee8329f352c318de25aa2f2bbe1e31587eb1f057448b4"}, {file = "sqlalchemy-2.0.44-cp312-cp312-win_amd64.whl", hash = "sha256:7cbcb47fd66ab294703e1644f78971f6f2f1126424d2b300678f419aa73c7b6e"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ff486e183d151e51b1d694c7aa1695747599bb00b9f5f604092b54b74c64a8e1"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b1af8392eb27b372ddb783b317dea0f650241cea5bd29199b22235299ca2e45"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b61188657e3a2b9ac4e8f04d6cf8e51046e28175f79464c67f2fd35bceb0976"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b87e7b91a5d5973dda5f00cd61ef72ad75a1db73a386b62877d4875a8840959c"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:15f3326f7f0b2bfe406ee562e17f43f36e16167af99c4c0df61db668de20002d"}, + {file = "sqlalchemy-2.0.44-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1e77faf6ff919aa8cd63f1c4e561cac1d9a454a191bb864d5dd5e545935e5a40"}, {file = "sqlalchemy-2.0.44-cp313-cp313-win32.whl", hash = "sha256:ee51625c2d51f8baadf2829fae817ad0b66b140573939dd69284d2ba3553ae73"}, {file = "sqlalchemy-2.0.44-cp313-cp313-win_amd64.whl", hash = "sha256:c1c80faaee1a6c3428cecf40d16a2365bcf56c424c92c2b6f0f9ad204b899e9e"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2fc44e5965ea46909a416fff0af48a219faefd5773ab79e5f8a5fcd5d62b2667"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:dc8b3850d2a601ca2320d081874033684e246d28e1c5e89db0864077cfc8f5a9"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d733dec0614bb8f4bcb7c8af88172b974f685a31dc3a65cca0527e3120de5606"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:22be14009339b8bc16d6b9dc8780bacaba3402aa7581658e246114abbd2236e3"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:357bade0e46064f88f2c3a99808233e67b0051cdddf82992379559322dfeb183"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:4848395d932e93c1595e59a8672aa7400e8922c39bb9b0668ed99ac6fa867822"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win32.whl", hash = "sha256:2f19644f27c76f07e10603580a47278abb2a70311136a7f8fd27dc2e096b9013"}, + {file = "sqlalchemy-2.0.44-cp38-cp38-win_amd64.whl", hash = "sha256:1df4763760d1de0dfc8192cc96d8aa293eb1a44f8f7a5fbe74caf1b551905c5e"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f7027414f2b88992877573ab780c19ecb54d3a536bef3397933573d6b5068be4"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fe166c7d00912e8c10d3a9a0ce105569a31a3d0db1a6e82c4e0f4bf16d5eca9"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3caef1ff89b1caefc28f0368b3bde21a7e3e630c2eddac16abd9e47bd27cc36a"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc2856d24afa44295735e72f3c75d6ee7fdd4336d8d3a8f3d44de7aa6b766df2"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11bac86b0deada30b6b5f93382712ff0e911fe8d31cb9bf46e6b149ae175eff0"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:4d18cd0e9a0f37c9f4088e50e3839fcb69a380a0ec957408e0b57cff08ee0a26"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win32.whl", hash = "sha256:9e9018544ab07614d591a26c1bd4293ddf40752cc435caf69196740516af7100"}, + {file = "sqlalchemy-2.0.44-cp39-cp39-win_amd64.whl", hash = "sha256:8e0e4e66fd80f277a8c3de016a81a554e76ccf6b8d881ee0b53200305a8433f6"}, {file = "sqlalchemy-2.0.44-py3-none-any.whl", hash = "sha256:19de7ca1246fbef9f9d1bff8f1ab25641569df226364a0e40457dc5457c54b05"}, {file = 
"sqlalchemy-2.0.44.tar.gz", hash = "sha256:0ae7454e1ab1d780aee69fd2aae7d6b8670a581d8847f2d1e0f7ddfbf47e5a22"}, ] @@ -4202,6 +4392,18 @@ files = [ doc = ["reno", "sphinx"] test = ["pytest", "tornado (>=4.5)", "typeguard"] +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] + [[package]] name = "thefuzz" version = "0.22.1" @@ -4217,6 +4419,18 @@ files = [ [package.dependencies] rapidfuzz = ">=3.0.0,<4.0.0" +[[package]] +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] +files = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] + [[package]] name = "tqdm" version = "4.67.1" @@ -4239,6 +4453,24 @@ notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] +[[package]] +name = "troposphere" +version = "4.9.4" +description = "AWS CloudFormation creation library" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "troposphere-4.9.4-py3-none-any.whl", hash = "sha256:45d1b600e5a0d0678416eaf5a48ee66f68d771e0c0e7ee695b6ebc8c93fb0e02"}, + {file = "troposphere-4.9.4.tar.gz", hash = "sha256:55af51da7a634960193ed054146cfa8656f5a8a7b0027aa7f200506e25058b08"}, +] + +[package.dependencies] +cfn_flip = ">=1.0.2" + +[package.extras] +policy = ["awacs (>=2.0.0)"] + [[package]] name = "typing-extensions" version = 
"4.15.0" @@ -4349,6 +4581,39 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"GraalVM\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] +[[package]] +name = "werkzeug" +version = "3.1.3" +description = "The comprehensive WSGI web application library." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, +] + +[package.dependencies] +MarkupSafe = ">=2.1.1" + +[package.extras] +watchdog = ["watchdog (>=2.3)"] + +[[package]] +name = "wheel" +version = "0.45.1" +description = "A built-package format for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "wheel-0.45.1-py3-none-any.whl", hash = "sha256:708e7481cc80179af0e556bbf0cc00b8444c7321e2700b8d8580231d13017248"}, + {file = "wheel-0.45.1.tar.gz", hash = "sha256:661e1abd9198507b1409a20c02106d9670b2576e916d58f520316666abca6729"}, +] + +[package.extras] +test = ["pytest (>=6.0.0)", "setuptools (>=65)"] + [[package]] name = "yarl" version = "1.22.0" @@ -4494,6 +4759,38 @@ idna = ">=2.0" multidict = ">=4.0" propcache = ">=0.2.1" +[[package]] +name = "zappa" 
+version = "0.60.2" +description = "Server-less Python Web Services for AWS Lambda and API Gateway" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zappa-0.60.2-py3-none-any.whl", hash = "sha256:876b81210cc152b617b3ddd988a9376023d0b00e47a195d285d646198ef45563"}, + {file = "zappa-0.60.2.tar.gz", hash = "sha256:f20a07e8e447c954c50adafe93dd10c0c8261d70e7818bb2d4ba6023c6114542"}, +] + +[package.dependencies] +argcomplete = "*" +boto3 = ">=1.17.28" +durationpy = "*" +hjson = "*" +jmespath = "*" +kappa = "0.6.0" +pip = ">=24.0.0" +placebo = "<0.10" +python-dateutil = "*" +python-slugify = "*" +pyyaml = "*" +requests = ">=2.32.0" +setuptools = "<80.0.0" +toml = "*" +tqdm = ">=4.66.3" +troposphere = ">=3.0" +werkzeug = "*" +wheel = "*" + [[package]] name = "zstandard" version = "0.25.0" @@ -4609,4 +4906,4 @@ cffi = ["cffi (>=1.17,<2.0) ; platform_python_implementation != \"PyPy\" and pyt [metadata] lock-version = "2.1" python-versions = "^3.13" -content-hash = "9d8bc478fcdde3cf8429f8bd6b67ae17444dcbd0863b51188db45d515c65b217" +content-hash = "f1503182759d8bb911ce3c558af1e49c81d055b31a8be48f88601b87bc975a88" diff --git a/backend/pyproject.toml b/backend/pyproject.toml index 9437be25e6..10db495db8 100644 --- a/backend/pyproject.toml +++ b/backend/pyproject.toml @@ -48,6 +48,7 @@ strawberry-graphql = { extras = [ "django" ], version = "^0.283.2" } strawberry-graphql-django = "^0.66.0" thefuzz = "^0.22.1" pyparsing = "^3.2.3" +zappa = "^0.60.2" [tool.poetry.group.dev.dependencies] djlint = "^1.36.4" diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json new file mode 100644 index 0000000000..11f2a9e685 --- /dev/null +++ b/backend/zappa_settings.example.json @@ -0,0 +1,34 @@ +{ + "staging": { + "app_function": "wsgi.application", + "django_settings": "settings.staging", + "environment_variables": { + "DJANGO_ALGOLIA_APPLICATION_ID": "${DJANGO_ALGOLIA_APPLICATION_ID}", + "DJANGO_ALGOLIA_WRITE_API_KEY": 
"${DJANGO_ALGOLIA_WRITE_API_KEY}", + "DJANGO_ALLOWED_HOSTS": "${DJANGO_ALLOWED_HOSTS}", + "DJANGO_AWS_ACCESS_KEY_ID": "${DJANGO_AWS_ACCESS_KEY_ID}", + "DJANGO_AWS_SECRET_ACCESS_KEY": "${DJANGO_AWS_SECRET_ACCESS_KEY}", + "DJANGO_CONFIGURATION": "Staging", + "DJANGO_DB_HOST": "${DJANGO_DB_HOST}", + "DJANGO_DB_NAME": "${DJANGO_DB_NAME}", + "DJANGO_DB_USER": "${DJANGO_DB_USER}", + "DJANGO_DB_PORT": "${DJANGO_DB_PORT}", + "DJANGO_DB_PASSWORD": "${DJANGO_DB_PASSWORD}", + "DJANGO_OPEN_AI_SECRET_KEY": "${DJANGO_OPEN_AI_SECRET_KEY}", + "DJANGO_REDIS_HOST": "${DJANGO_REDIS_HOST}", + "DJANGO_REDIS_PASSWORD": "${DJANGO_REDIS_PASSWORD}", + "DJANGO_SECRET_KEY": "${DJANGO_SECRET_KEY}", + "DJANGO_SENTRY_DSN": "${DJANGO_SENTRY_DSN}", + "DJANGO_SLACK_BOT_TOKEN": "${DJANGO_SLACK_BOT_TOKEN}", + "DJANGO_SLACK_SIGNING_SECRET": "${DJANGO_SLACK_SIGNING_SECRET}" + }, + "project_name": "nest-backend", + "runtime": "python3.13", + "s3_bucket": "${ZAPPA_S3_BUCKET}", + "slim_handler": true, + "vpc_config": { + "SecurityGroupIds": ["${VPC_SECURITY_GROUP}"], + "SubnetIds": ["${VPC_SUBNET_A}", "${VPC_SUBNET_B}"] + } + } +} From aa4eea0b210ced4a639fa761ef80ebd74d8abafe Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 17 Oct 2025 20:34:54 +0530 Subject: [PATCH 02/35] add temporary IaC code for testing --- .gitignore | 4 + backend/zappa_settings.example.json | 2 + infrastructure/.terraform.lock.hcl | 43 +++++++ infrastructure/main.tf | 64 ++++++++++ infrastructure/modules/cache/main.tf | 58 +++++++++ infrastructure/modules/cache/outputs.tf | 10 ++ infrastructure/modules/cache/variables.tf | 46 +++++++ infrastructure/modules/database/main.tf | 60 +++++++++ infrastructure/modules/database/outputs.tf | 10 ++ infrastructure/modules/database/variables.tf | 51 ++++++++ infrastructure/modules/iam/main.tf | 105 ++++++++++++++++ infrastructure/modules/iam/variables.tf | 14 +++ infrastructure/modules/networking/main.tf | 115 ++++++++++++++++++ infrastructure/modules/networking/outputs.tf | 14 
+++ .../modules/networking/variables.tf | 29 +++++ infrastructure/modules/security/main.tf | 72 +++++++++++ infrastructure/modules/security/outputs.tf | 14 +++ infrastructure/modules/security/variables.tf | 24 ++++ infrastructure/modules/storage/main.tf | 56 +++++++++ infrastructure/modules/storage/outputs.tf | 4 + infrastructure/modules/storage/variables.tf | 14 +++ infrastructure/outputs.tf | 21 ++++ infrastructure/providers.tf | 3 + infrastructure/variables.tf | 115 ++++++++++++++++++ 24 files changed, 948 insertions(+) create mode 100644 infrastructure/.terraform.lock.hcl create mode 100644 infrastructure/main.tf create mode 100644 infrastructure/modules/cache/main.tf create mode 100644 infrastructure/modules/cache/outputs.tf create mode 100644 infrastructure/modules/cache/variables.tf create mode 100644 infrastructure/modules/database/main.tf create mode 100644 infrastructure/modules/database/outputs.tf create mode 100644 infrastructure/modules/database/variables.tf create mode 100644 infrastructure/modules/iam/main.tf create mode 100644 infrastructure/modules/iam/variables.tf create mode 100644 infrastructure/modules/networking/main.tf create mode 100644 infrastructure/modules/networking/outputs.tf create mode 100644 infrastructure/modules/networking/variables.tf create mode 100644 infrastructure/modules/security/main.tf create mode 100644 infrastructure/modules/security/outputs.tf create mode 100644 infrastructure/modules/security/variables.tf create mode 100644 infrastructure/modules/storage/main.tf create mode 100644 infrastructure/modules/storage/outputs.tf create mode 100644 infrastructure/modules/storage/variables.tf create mode 100644 infrastructure/outputs.tf create mode 100644 infrastructure/providers.tf create mode 100644 infrastructure/variables.tf diff --git a/.gitignore b/.gitignore index e089996112..ea7b351153 100644 --- a/.gitignore +++ b/.gitignore @@ -17,6 +17,9 @@ __pycache__ .python_history .python-version .ruff_cache +**/.terraform/ 
+*.tfstate +*.tfstate.* .venv/ .vscode *.code-workspace @@ -39,6 +42,7 @@ frontend/pnpm-debug.log* frontend/test-results/ frontend/yarn-debug.log* frontend/yarn-error.log* +infrastructure/terraform.tfvars logs node_modules/ TODO diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json index 11f2a9e685..78fecece16 100644 --- a/backend/zappa_settings.example.json +++ b/backend/zappa_settings.example.json @@ -22,7 +22,9 @@ "DJANGO_SLACK_BOT_TOKEN": "${DJANGO_SLACK_BOT_TOKEN}", "DJANGO_SLACK_SIGNING_SECRET": "${DJANGO_SLACK_SIGNING_SECRET}" }, + "manage_roles": false, "project_name": "nest-backend", + "role_arn": "${ZAPPA_ROLE_ARM}", "runtime": "python3.13", "s3_bucket": "${ZAPPA_S3_BUCKET}", "slim_handler": true, diff --git a/infrastructure/.terraform.lock.hcl b/infrastructure/.terraform.lock.hcl new file mode 100644 index 0000000000..8f874e9474 --- /dev/null +++ b/infrastructure/.terraform.lock.hcl @@ -0,0 +1,43 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + hashes = [ + "h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + 
"zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} diff --git a/infrastructure/main.tf b/infrastructure/main.tf new file mode 100644 index 0000000000..e87295f931 --- /dev/null +++ b/infrastructure/main.tf @@ -0,0 +1,64 @@ +module "networking" { + source = "./modules/networking" + + vpc_cidr = var.vpc_cidr + public_subnet_cidrs = var.public_subnet_cidrs + private_subnet_cidrs = var.private_subnet_cidrs + availability_zones = var.availability_zones + project_name = var.project_name + environment = var.environment +} + +module "security" { + source = "./modules/security" + + vpc_id = module.networking.vpc_id + db_port = var.db_port + redis_port = var.redis_port + project_name = var.project_name + environment = var.environment +} + +module "storage" { + source = "./modules/storage" + + zappa_s3_bucket = var.zappa_s3_bucket + project_name = var.project_name + environment = var.environment +} + +module "iam" { + source = "./modules/iam" + + s3_bucket_arn = module.storage.zappa_s3_bucket_arn + project_name = var.project_name + environment = var.environment +} + +module "database" { + source = "./modules/database" + + db_allocated_storage = var.db_allocated_storage + db_engine_version = var.db_engine_version + db_instance_class = var.db_instance_class + db_name = var.db_name + db_password = var.db_password + db_username = var.db_username + db_subnet_ids = module.networking.private_subnet_ids + security_group_ids = [module.security.rds_sg_id] + project_name = var.project_name + environment = var.environment +} + +module "cache" { + source = "./modules/cache" + + redis_engine_version = var.redis_engine_version + redis_node_type = var.redis_node_type + redis_num_cache_nodes = var.redis_num_cache_nodes + redis_port = var.redis_port + 
subnet_ids = module.networking.private_subnet_ids + security_group_ids = [module.security.redis_sg_id] + project_name = var.project_name + environment = var.environment +} diff --git a/infrastructure/modules/cache/main.tf b/infrastructure/modules/cache/main.tf new file mode 100644 index 0000000000..0807827f2c --- /dev/null +++ b/infrastructure/modules/cache/main.tf @@ -0,0 +1,58 @@ +# ElastiCache Subnet Group +resource "aws_elasticache_subnet_group" "main" { + name = "${var.project_name}-${var.environment}-cache-subnet-group" + subnet_ids = var.subnet_ids + + tags = { + Name = "${var.project_name}-${var.environment}-cache-subnet-group" + } +} + +# Random auth token for Redis (if not provided) +resource "random_password" "redis_auth_token" { + count = var.redis_auth_token == null || var.redis_auth_token == "" ? 1 : 0 + + length = 32 + special = true + # Redis auth token has specific requirements + override_special = "!&#$^<>-" +} + +# ElastiCache Redis Replication Group +resource "aws_elasticache_replication_group" "main" { + replication_group_id = "${var.project_name}-${var.environment}-cache" + description = "${var.project_name} ${var.environment} Redis cache" + + engine = "redis" + engine_version = var.redis_engine_version + node_type = var.redis_node_type + port = var.redis_port + parameter_group_name = "default.redis7" + + # Cluster configuration + num_cache_clusters = var.redis_num_cache_nodes + + # Network configuration + subnet_group_name = aws_elasticache_subnet_group.main.name + security_group_ids = var.security_group_ids + + # Security + at_rest_encryption_enabled = true + transit_encryption_enabled = true + auth_token = var.redis_auth_token != null && var.redis_auth_token != "" ? 
var.redis_auth_token : random_password.redis_auth_token[0].result + + # Maintenance and backups + snapshot_retention_limit = 5 + snapshot_window = "03:00-05:00" + maintenance_window = "mon:05:00-mon:07:00" + + # Automatic failover (requires at least 2 nodes) + automatic_failover_enabled = var.redis_num_cache_nodes > 1 + + # Enable automatic minor version upgrades + auto_minor_version_upgrade = true + + tags = { + Name = "${var.project_name}-${var.environment}-redis" + } +} diff --git a/infrastructure/modules/cache/outputs.tf b/infrastructure/modules/cache/outputs.tf new file mode 100644 index 0000000000..a15d4c8a15 --- /dev/null +++ b/infrastructure/modules/cache/outputs.tf @@ -0,0 +1,10 @@ +output "redis_primary_endpoint" { + description = "The primary endpoint of the Redis replication group" + value = aws_elasticache_replication_group.main.primary_endpoint_address +} + +output "redis_auth_token" { + description = "The auth token for Redis" + value = random_password.redis_auth_token[0].result + sensitive = true +} diff --git a/infrastructure/modules/cache/variables.tf b/infrastructure/modules/cache/variables.tf new file mode 100644 index 0000000000..38810d8424 --- /dev/null +++ b/infrastructure/modules/cache/variables.tf @@ -0,0 +1,46 @@ +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "redis_auth_token" { + description = "The auth token for Redis" + type = string + sensitive = true + default = null +} + +variable "redis_engine_version" { + description = "The version of the Redis engine" + type = string +} + +variable "redis_node_type" { + description = "The node type for the Redis cache" + type = string +} + +variable "redis_num_cache_nodes" { + description = "The number of cache nodes in the Redis cluster" + type = number +} + +variable "redis_port" { + description = "The port for the Redis cache" + type 
= number +} + +variable "security_group_ids" { + description = "A list of security group IDs to associate with the Redis cache" + type = list(string) +} + +variable "subnet_ids" { + description = "A list of subnet IDs for the cache subnet group" + type = list(string) +} diff --git a/infrastructure/modules/database/main.tf b/infrastructure/modules/database/main.tf new file mode 100644 index 0000000000..a52574f8e3 --- /dev/null +++ b/infrastructure/modules/database/main.tf @@ -0,0 +1,60 @@ +# DB Subnet Group +resource "aws_db_subnet_group" "main" { + name = "${var.project_name}-${var.environment}-db-subnet-group" + subnet_ids = var.db_subnet_ids + + tags = { + Name = "${var.project_name}-${var.environment}-db-subnet-group" + } +} + +# Random password for RDS (if not provided) +resource "random_password" "db_password" { + count = var.db_password == null || var.db_password == "" ? 1 : 0 + + length = 32 + special = true + # Avoid special characters that might cause issues + override_special = "!#$%&*()-_=+[]{}<>:?" +} + +# RDS PostgreSQL Instance +resource "aws_db_instance" "main" { + identifier = "${var.project_name}-${var.environment}-db" + engine = "postgres" + engine_version = var.db_engine_version + instance_class = var.db_instance_class + allocated_storage = var.db_allocated_storage + storage_type = "gp3" + storage_encrypted = true + + db_name = var.db_name + username = var.db_username + password = var.db_password != null && var.db_password != "" ? 
var.db_password : random_password.db_password[0].result + + db_subnet_group_name = aws_db_subnet_group.main.name + vpc_security_group_ids = var.security_group_ids + publicly_accessible = false + + backup_retention_period = 7 + backup_window = "03:00-04:00" + maintenance_window = "mon:04:00-mon:05:00" + + # Enable automated backups + skip_final_snapshot = true + final_snapshot_identifier = "${var.project_name}-${var.environment}-final-snapshot-${formatdate("YYYY-MM-DD-hhmm", timestamp())}" + copy_tags_to_snapshot = true + + # Performance Insights + enabled_cloudwatch_logs_exports = ["postgresql", "upgrade"] + + tags = { + Name = "${var.project_name}-${var.environment}-postgres" + } + + lifecycle { + ignore_changes = [ + final_snapshot_identifier + ] + } +} diff --git a/infrastructure/modules/database/outputs.tf b/infrastructure/modules/database/outputs.tf new file mode 100644 index 0000000000..339eecd8a0 --- /dev/null +++ b/infrastructure/modules/database/outputs.tf @@ -0,0 +1,10 @@ +output "db_instance_endpoint" { + description = "The endpoint of the RDS instance" + value = aws_db_instance.main.endpoint +} + +output "db_password" { + description = "The password for the RDS database" + value = random_password.db_password[0].result + sensitive = true +} diff --git a/infrastructure/modules/database/variables.tf b/infrastructure/modules/database/variables.tf new file mode 100644 index 0000000000..eb878381cf --- /dev/null +++ b/infrastructure/modules/database/variables.tf @@ -0,0 +1,51 @@ +variable "db_allocated_storage" { + description = "The allocated storage for the RDS database in GB" + type = number +} + +variable "db_engine_version" { + description = "The version of the PostgreSQL engine" + type = string +} + +variable "db_instance_class" { + description = "The instance class for the RDS database" + type = string +} + +variable "db_name" { + description = "The name of the RDS database" + type = string +} + +variable "db_password" { + description = "The password 
for the RDS database" + type = string + sensitive = true + default = null +} + +variable "db_subnet_ids" { + description = "A list of subnet IDs for the DB subnet group" + type = list(string) +} + +variable "db_username" { + description = "The username for the RDS database" + type = string +} + +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "security_group_ids" { + description = "A list of security group IDs to associate with the RDS database" + type = list(string) +} diff --git a/infrastructure/modules/iam/main.tf b/infrastructure/modules/iam/main.tf new file mode 100644 index 0000000000..59dbe2e7ee --- /dev/null +++ b/infrastructure/modules/iam/main.tf @@ -0,0 +1,105 @@ +# IAM Role for Lambda Execution (Zappa) +resource "aws_iam_role" "zappa_execution" { + name = "${var.project_name}-${var.environment}-ZappaLambdaExecutionRole" + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Sid = "" + Effect = "Allow" + Principal = { + Service = [ + "lambda.amazonaws.com", + "apigateway.amazonaws.com", + "events.amazonaws.com" + ] + } + Action = "sts:AssumeRole" + } + ] + }) + + tags = { + Name = "${var.project_name}-${var.environment}-zappa-execution-role" + } +} + +# Attach AWS managed policy for Lambda VPC execution +resource "aws_iam_role_policy_attachment" "lambda_vpc_execution" { + role = aws_iam_role.zappa_execution.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" +} + +# Attach AWS managed policy for basic Lambda execution +resource "aws_iam_role_policy_attachment" "lambda_basic_execution" { + role = aws_iam_role.zappa_execution.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" +} + +# Custom policy for S3 access (Zappa deployments) +resource "aws_iam_role_policy" "s3_access" { + name = 
"${var.project_name}-${var.environment}-s3-access" + role = aws_iam_role.zappa_execution.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "s3:GetObject", + "s3:PutObject", + "s3:DeleteObject", + "s3:ListBucket" + ] + Resource = [ + var.s3_bucket_arn, + "${var.s3_bucket_arn}/*" + ] + } + ] + }) +} + +# Custom policy for CloudWatch Logs +resource "aws_iam_role_policy" "cloudwatch_logs" { + name = "${var.project_name}-${var.environment}-cloudwatch-logs" + role = aws_iam_role.zappa_execution.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "logs:CreateLogGroup", + "logs:CreateLogStream", + "logs:PutLogEvents", + "logs:DescribeLogStreams" + ] + Resource = "arn:aws:logs:*:*:*" + } + ] + }) +} + +# Custom policy for X-Ray tracing (optional, but good for debugging) +resource "aws_iam_role_policy" "xray" { + name = "${var.project_name}-${var.environment}-xray" + role = aws_iam_role.zappa_execution.id + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Effect = "Allow" + Action = [ + "xray:PutTraceSegments", + "xray:PutTelemetryRecords" + ] + Resource = "*" + } + ] + }) +} diff --git a/infrastructure/modules/iam/variables.tf b/infrastructure/modules/iam/variables.tf new file mode 100644 index 0000000000..ff663d2b7f --- /dev/null +++ b/infrastructure/modules/iam/variables.tf @@ -0,0 +1,14 @@ +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "s3_bucket_arn" { + description = "The ARN of the S3 bucket for Zappa deployments" + type = string +} diff --git a/infrastructure/modules/networking/main.tf b/infrastructure/modules/networking/main.tf new file mode 100644 index 0000000000..3c46147394 --- /dev/null +++ b/infrastructure/modules/networking/main.tf @@ -0,0 +1,115 @@ +# VPC +resource 
"aws_vpc" "main" { + cidr_block = var.vpc_cidr + enable_dns_hostnames = true + enable_dns_support = true + + tags = { + Name = "${var.project_name}-${var.environment}-vpc" + } +} + +# Internet Gateway +resource "aws_internet_gateway" "main" { + vpc_id = aws_vpc.main.id + + tags = { + Name = "${var.project_name}-${var.environment}-igw" + } +} + +# Public Subnets +resource "aws_subnet" "public" { + count = length(var.public_subnet_cidrs) + + vpc_id = aws_vpc.main.id + cidr_block = var.public_subnet_cidrs[count.index] + availability_zone = var.availability_zones[count.index] + map_public_ip_on_launch = true + + tags = { + Name = "${var.project_name}-${var.environment}-public-${var.availability_zones[count.index]}" + Type = "Public" + } +} + +# Private Subnets +resource "aws_subnet" "private" { + count = length(var.private_subnet_cidrs) + + vpc_id = aws_vpc.main.id + cidr_block = var.private_subnet_cidrs[count.index] + availability_zone = var.availability_zones[count.index] + + tags = { + Name = "${var.project_name}-${var.environment}-private-${var.availability_zones[count.index]}" + Type = "Private" + } +} + +# Elastic IP for NAT Gateway +resource "aws_eip" "nat" { + domain = "vpc" + + tags = { + Name = "${var.project_name}-${var.environment}-nat-eip" + } + + depends_on = [aws_internet_gateway.main] +} + +# NAT Gateway (in first public subnet) +resource "aws_nat_gateway" "main" { + allocation_id = aws_eip.nat.id + subnet_id = aws_subnet.public[0].id + + tags = { + Name = "${var.project_name}-${var.environment}-nat" + } + + depends_on = [aws_internet_gateway.main] +} + +# Public Route Table +resource "aws_route_table" "public" { + vpc_id = aws_vpc.main.id + + route { + cidr_block = "0.0.0.0/0" + gateway_id = aws_internet_gateway.main.id + } + + tags = { + Name = "${var.project_name}-${var.environment}-public-rt" + } +} + +# Private Route Table +resource "aws_route_table" "private" { + vpc_id = aws_vpc.main.id + + route { + cidr_block = "0.0.0.0/0" + nat_gateway_id = 
aws_nat_gateway.main.id + } + + tags = { + Name = "${var.project_name}-${var.environment}-private-rt" + } +} + +# Associate public subnets with public route table +resource "aws_route_table_association" "public" { + count = length(aws_subnet.public) + + subnet_id = aws_subnet.public[count.index].id + route_table_id = aws_route_table.public.id +} + +# Associate private subnets with private route table +resource "aws_route_table_association" "private" { + count = length(aws_subnet.private) + + subnet_id = aws_subnet.private[count.index].id + route_table_id = aws_route_table.private.id +} diff --git a/infrastructure/modules/networking/outputs.tf b/infrastructure/modules/networking/outputs.tf new file mode 100644 index 0000000000..ca4f0573cb --- /dev/null +++ b/infrastructure/modules/networking/outputs.tf @@ -0,0 +1,14 @@ +output "vpc_id" { + description = "The ID of the VPC" + value = aws_vpc.main.id +} + +output "public_subnet_ids" { + description = "A list of public subnet IDs" + value = aws_subnet.public[*].id +} + +output "private_subnet_ids" { + description = "A list of private subnet IDs" + value = aws_subnet.private[*].id +} diff --git a/infrastructure/modules/networking/variables.tf b/infrastructure/modules/networking/variables.tf new file mode 100644 index 0000000000..3af07da20e --- /dev/null +++ b/infrastructure/modules/networking/variables.tf @@ -0,0 +1,29 @@ +variable "availability_zones" { + description = "A list of availability zones for the VPC" + type = list(string) +} + +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "private_subnet_cidrs" { + description = "A list of CIDR blocks for the private subnets" + type = list(string) +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "public_subnet_cidrs" { + description = "A list of CIDR blocks for the public subnets" + type = list(string) +} + +variable "vpc_cidr" { + description = 
"The CIDR block for the VPC" + type = string +} diff --git a/infrastructure/modules/security/main.tf b/infrastructure/modules/security/main.tf new file mode 100644 index 0000000000..58cf9e193c --- /dev/null +++ b/infrastructure/modules/security/main.tf @@ -0,0 +1,72 @@ +# Lambda Security Group +resource "aws_security_group" "lambda" { + name = "${var.project_name}-${var.environment}-lambda-sg" + description = "Security group for Lambda functions (Zappa)" + vpc_id = var.vpc_id + + egress { + description = "Allow all outbound traffic" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = { + Name = "${var.project_name}-${var.environment}-lambda-sg" + } +} + +# RDS Security Group +resource "aws_security_group" "rds" { + name = "${var.project_name}-${var.environment}-rds-sg" + description = "Security group for RDS PostgreSQL" + vpc_id = var.vpc_id + + ingress { + description = "PostgreSQL from Lambda" + from_port = var.db_port + to_port = var.db_port + protocol = "tcp" + security_groups = [aws_security_group.lambda.id] + } + + egress { + description = "Allow all outbound traffic" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = { + Name = "${var.project_name}-${var.environment}-rds-sg" + } +} + +# ElastiCache Security Group +resource "aws_security_group" "redis" { + name = "${var.project_name}-${var.environment}-redis-sg" + description = "Security group for ElastiCache Redis" + vpc_id = var.vpc_id + + ingress { + description = "Redis from Lambda" + from_port = var.redis_port + to_port = var.redis_port + protocol = "tcp" + security_groups = [aws_security_group.lambda.id] + } + + egress { + description = "Allow all outbound traffic" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = { + Name = "${var.project_name}-${var.environment}-redis-sg" + } +} diff --git a/infrastructure/modules/security/outputs.tf b/infrastructure/modules/security/outputs.tf 
new file mode 100644 index 0000000000..adbf50a498 --- /dev/null +++ b/infrastructure/modules/security/outputs.tf @@ -0,0 +1,14 @@ +output "lambda_sg_id" { + description = "The ID of the Lambda security group" + value = aws_security_group.lambda.id +} + +output "rds_sg_id" { + description = "The ID of the RDS security group" + value = aws_security_group.rds.id +} + +output "redis_sg_id" { + description = "The ID of the Redis security group" + value = aws_security_group.redis.id +} diff --git a/infrastructure/modules/security/variables.tf b/infrastructure/modules/security/variables.tf new file mode 100644 index 0000000000..7c86b46554 --- /dev/null +++ b/infrastructure/modules/security/variables.tf @@ -0,0 +1,24 @@ +variable "db_port" { + description = "The port for the RDS database" + type = number +} + +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "redis_port" { + description = "The port for the Redis cache" + type = number +} + +variable "vpc_id" { + description = "The ID of the VPC" + type = string +} diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf new file mode 100644 index 0000000000..6e8d4ef5e1 --- /dev/null +++ b/infrastructure/modules/storage/main.tf @@ -0,0 +1,56 @@ +# S3 Bucket for Zappa Deployments +resource "aws_s3_bucket" "zappa" { + bucket = var.zappa_s3_bucket + + tags = { + Name = "${var.project_name}-${var.environment}-zappa-deployments" + } +} + +# Block public access +resource "aws_s3_bucket_public_access_block" "zappa" { + bucket = aws_s3_bucket.zappa.id + + block_public_acls = true + block_public_policy = true + ignore_public_acls = true + restrict_public_buckets = true +} + +# Enable versioning +resource "aws_s3_bucket_versioning" "zappa" { + bucket = aws_s3_bucket.zappa.id + + versioning_configuration { + status = "Enabled" + } +} + +# 
Server-side encryption +resource "aws_s3_bucket_server_side_encryption_configuration" "zappa" { + bucket = aws_s3_bucket.zappa.id + + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "AES256" + } + } +} + +# Lifecycle rule to clean up old versions +resource "aws_s3_bucket_lifecycle_configuration" "zappa" { + bucket = aws_s3_bucket.zappa.id + + rule { + id = "delete-old-versions" + status = "Enabled" + + noncurrent_version_expiration { + noncurrent_days = 30 + } + + abort_incomplete_multipart_upload { + days_after_initiation = 7 + } + } +} diff --git a/infrastructure/modules/storage/outputs.tf b/infrastructure/modules/storage/outputs.tf new file mode 100644 index 0000000000..79d46ece85 --- /dev/null +++ b/infrastructure/modules/storage/outputs.tf @@ -0,0 +1,4 @@ +output "zappa_s3_bucket_arn" { + description = "The ARN of the S3 bucket for Zappa deployments" + value = aws_s3_bucket.zappa.arn +} diff --git a/infrastructure/modules/storage/variables.tf b/infrastructure/modules/storage/variables.tf new file mode 100644 index 0000000000..472e4d71cc --- /dev/null +++ b/infrastructure/modules/storage/variables.tf @@ -0,0 +1,14 @@ +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "zappa_s3_bucket" { + description = "The name of the S3 bucket for Zappa deployments" + type = string +} diff --git a/infrastructure/outputs.tf b/infrastructure/outputs.tf new file mode 100644 index 0000000000..989898a4c0 --- /dev/null +++ b/infrastructure/outputs.tf @@ -0,0 +1,21 @@ +output "database_endpoint" { + description = "The endpoint of the RDS database" + value = module.database.db_instance_endpoint +} + +output "redis_endpoint" { + description = "The endpoint of the Redis cache" + value = module.cache.redis_primary_endpoint +} + +output "db_password" { + description = "The password for the RDS database" + value 
= module.database.db_password + sensitive = true +} + +output "redis_auth_token" { + description = "The auth token for Redis" + value = module.cache.redis_auth_token + sensitive = true +} diff --git a/infrastructure/providers.tf b/infrastructure/providers.tf new file mode 100644 index 0000000000..3aafc50044 --- /dev/null +++ b/infrastructure/providers.tf @@ -0,0 +1,3 @@ +provider "aws" { + region = "ap-south-1" +} diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf new file mode 100644 index 0000000000..2fd0dbb5a3 --- /dev/null +++ b/infrastructure/variables.tf @@ -0,0 +1,115 @@ +variable "availability_zones" { + description = "A list of availability zones for the VPC" + type = list(string) + default = ["ap-south-1a", "ap-south-1b"] +} + +variable "db_allocated_storage" { + description = "The allocated storage for the RDS database in GB" + type = number + default = 20 +} + +variable "db_engine_version" { + description = "The version of the PostgreSQL engine" + type = string + default = "16.10" +} + +variable "db_instance_class" { + description = "The instance class for the RDS database" + type = string + default = "db.t3.micro" +} + +variable "db_name" { + description = "The name of the RDS database" + type = string + default = "nestdb" +} + +variable "db_password" { + description = "The password for the RDS database" + type = string + sensitive = true + default = null +} + +variable "db_port" { + description = "The port for the RDS database" + type = number + default = 5432 +} + +variable "db_username" { + description = "The username for the RDS database" + type = string + default = "nestuser" +} + +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string + default = "staging" +} + +variable "private_subnet_cidrs" { + description = "A list of CIDR blocks for the private subnets" + type = list(string) + default = ["10.0.3.0/24", "10.0.4.0/24"] +} + +variable "project_name" { + description = "The name of 
the project" + type = string + default = "nest" +} + +variable "public_subnet_cidrs" { + description = "A list of CIDR blocks for the public subnets" + type = list(string) + default = ["10.0.1.0/24", "10.0.2.0/24"] +} + +variable "redis_auth_token" { + description = "The auth token for Redis" + type = string + sensitive = true + default = null +} + +variable "redis_engine_version" { + description = "The version of the Redis engine" + type = string + default = "7.0" +} + +variable "redis_node_type" { + description = "The node type for the Redis cache" + type = string + default = "cache.t3.micro" +} + +variable "redis_num_cache_nodes" { + description = "The number of cache nodes in the Redis cluster" + type = number + default = 1 +} + +variable "redis_port" { + description = "The port for the Redis cache" + type = number + default = 6379 +} + +variable "vpc_cidr" { + description = "The CIDR block for the VPC" + type = string + default = "10.0.0.0/16" +} + +variable "zappa_s3_bucket" { + description = "The name of the S3 bucket for Zappa deployments" + type = string + default = "nest-zappa-deployments" +} From 18b8adb28d78b42e864e8a00a59ee7cbed0cd41b Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 17 Oct 2025 20:54:01 +0530 Subject: [PATCH 03/35] Update code --- backend/zappa_settings.example.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json index 78fecece16..4f7fdb2905 100644 --- a/backend/zappa_settings.example.json +++ b/backend/zappa_settings.example.json @@ -24,7 +24,7 @@ }, "manage_roles": false, "project_name": "nest-backend", - "role_arn": "${ZAPPA_ROLE_ARM}", + "role_arn": "${ZAPPA_ROLE_ARN}", "runtime": "python3.13", "s3_bucket": "${ZAPPA_S3_BUCKET}", "slim_handler": true, From 481b9284a0771393a6e3c0bd3c13a06700c53588 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Sat, 18 Oct 2025 14:23:15 +0530 Subject: [PATCH 04/35] Remove IAM and let 
Zappa manage it --- backend/zappa_settings.example.json | 3 +- infrastructure/main.tf | 6 -- infrastructure/modules/iam/main.tf | 105 ------------------------ infrastructure/modules/iam/variables.tf | 14 ---- 4 files changed, 1 insertion(+), 127 deletions(-) delete mode 100644 infrastructure/modules/iam/main.tf delete mode 100644 infrastructure/modules/iam/variables.tf diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json index 4f7fdb2905..d05f68a6fa 100644 --- a/backend/zappa_settings.example.json +++ b/backend/zappa_settings.example.json @@ -22,9 +22,8 @@ "DJANGO_SLACK_BOT_TOKEN": "${DJANGO_SLACK_BOT_TOKEN}", "DJANGO_SLACK_SIGNING_SECRET": "${DJANGO_SLACK_SIGNING_SECRET}" }, - "manage_roles": false, + "manage_roles": true, "project_name": "nest-backend", - "role_arn": "${ZAPPA_ROLE_ARN}", "runtime": "python3.13", "s3_bucket": "${ZAPPA_S3_BUCKET}", "slim_handler": true, diff --git a/infrastructure/main.tf b/infrastructure/main.tf index e87295f931..d75ccbd75e 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -27,13 +27,7 @@ module "storage" { environment = var.environment } -module "iam" { - source = "./modules/iam" - s3_bucket_arn = module.storage.zappa_s3_bucket_arn - project_name = var.project_name - environment = var.environment -} module "database" { source = "./modules/database" diff --git a/infrastructure/modules/iam/main.tf b/infrastructure/modules/iam/main.tf deleted file mode 100644 index 59dbe2e7ee..0000000000 --- a/infrastructure/modules/iam/main.tf +++ /dev/null @@ -1,105 +0,0 @@ -# IAM Role for Lambda Execution (Zappa) -resource "aws_iam_role" "zappa_execution" { - name = "${var.project_name}-${var.environment}-ZappaLambdaExecutionRole" - - assume_role_policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Sid = "" - Effect = "Allow" - Principal = { - Service = [ - "lambda.amazonaws.com", - "apigateway.amazonaws.com", - "events.amazonaws.com" - ] - } - Action = "sts:AssumeRole" - } - ] - 
}) - - tags = { - Name = "${var.project_name}-${var.environment}-zappa-execution-role" - } -} - -# Attach AWS managed policy for Lambda VPC execution -resource "aws_iam_role_policy_attachment" "lambda_vpc_execution" { - role = aws_iam_role.zappa_execution.name - policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaVPCAccessExecutionRole" -} - -# Attach AWS managed policy for basic Lambda execution -resource "aws_iam_role_policy_attachment" "lambda_basic_execution" { - role = aws_iam_role.zappa_execution.name - policy_arn = "arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole" -} - -# Custom policy for S3 access (Zappa deployments) -resource "aws_iam_role_policy" "s3_access" { - name = "${var.project_name}-${var.environment}-s3-access" - role = aws_iam_role.zappa_execution.id - - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Action = [ - "s3:GetObject", - "s3:PutObject", - "s3:DeleteObject", - "s3:ListBucket" - ] - Resource = [ - var.s3_bucket_arn, - "${var.s3_bucket_arn}/*" - ] - } - ] - }) -} - -# Custom policy for CloudWatch Logs -resource "aws_iam_role_policy" "cloudwatch_logs" { - name = "${var.project_name}-${var.environment}-cloudwatch-logs" - role = aws_iam_role.zappa_execution.id - - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Action = [ - "logs:CreateLogGroup", - "logs:CreateLogStream", - "logs:PutLogEvents", - "logs:DescribeLogStreams" - ] - Resource = "arn:aws:logs:*:*:*" - } - ] - }) -} - -# Custom policy for X-Ray tracing (optional, but good for debugging) -resource "aws_iam_role_policy" "xray" { - name = "${var.project_name}-${var.environment}-xray" - role = aws_iam_role.zappa_execution.id - - policy = jsonencode({ - Version = "2012-10-17" - Statement = [ - { - Effect = "Allow" - Action = [ - "xray:PutTraceSegments", - "xray:PutTelemetryRecords" - ] - Resource = "*" - } - ] - }) -} diff --git a/infrastructure/modules/iam/variables.tf 
b/infrastructure/modules/iam/variables.tf deleted file mode 100644 index ff663d2b7f..0000000000 --- a/infrastructure/modules/iam/variables.tf +++ /dev/null @@ -1,14 +0,0 @@ -variable "environment" { - description = "The environment (e.g., staging, production)" - type = string -} - -variable "project_name" { - description = "The name of the project" - type = string -} - -variable "s3_bucket_arn" { - description = "The ARN of the S3 bucket for Zappa deployments" - type = string -} From ee1edb65b6ed29aca52a599c308bbebbc8b63368 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Sat, 18 Oct 2025 23:20:39 +0530 Subject: [PATCH 05/35] add README --- infrastructure/README.md | 105 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 105 insertions(+) create mode 100644 infrastructure/README.md diff --git a/infrastructure/README.md b/infrastructure/README.md new file mode 100644 index 0000000000..26568c6f38 --- /dev/null +++ b/infrastructure/README.md @@ -0,0 +1,105 @@ +# Infrastructure + +This document provides instructions on how to manage the infrastructure for this project using Terraform and Zappa. + +## Terraform + +### Prerequisites + +- Terraform +- An AWS account with credentials configured locally. + +### Usage + +1. **Initialize Terraform:** + + ```bash + terraform init + ``` + +2. **Plan the changes:** + + ```bash + terraform plan + ``` + +3. **Apply the changes:** + + ```bash + terraform apply + ``` + +### Variables + +You can override the default values by creating a `terraform.tfvars` file in the `infrastructure/` directory. + +# TODO: Provide an example terraform.tfvars with important vars + + +### Outputs + +Get the output values using the `terraform output` command. These outputs will be used for Zappa configuration. 
+
+
+```bash
+terraform output
+```
+
+```bash
+terraform output -raw db_password redis_auth_token
+```
+
+## Zappa Deployment
+
+The Django backend deployment is managed by Zappa; this also includes provisioning the API Gateway, IAM roles, and Lambda function.
+
+### Install poetry dependencies
+
+1. **Install dependencies using Poetry:**
+
+   ```bash
+   poetry install
+   ```
+
+2. **Activate the virtual environment:**
+
+   ```bash
+   eval $(poetry env activate)
+   ```
+
+3. **Create a `zappa_settings.json` file:**
+
+   ```bash
+   cp zappa_settings.example.json zappa_settings.json
+   ```
+
+Replace all variables in the copied `zappa_settings.json` with appropriate secrets.
+# TODO: explain this step
+
+4. **Deploy staging:**
+
+   ```bash
+   zappa deploy staging
+   ```
+
+Once deployed, Zappa will provide you with a URL. You can use this URL to test the API.
+
+### Updating
+After making necessary changes, you may run the following command to update the deployment.
+```bash
+zappa update staging
+```
+
+### Cleaning Up
+
+To delete the deployment, you can use the following command:
+
+```bash
+zappa undeploy staging
+```
+
+Then run this command to destroy the terraform infrastructure:
+
+```bash
+terraform destroy
+```

From 90e24e14893e96ff5428ea200e2e57eecaa5c47f Mon Sep 17 00:00:00 2001
From: Rudransh Shrivastava
Date: Sun, 19 Oct 2025 18:08:22 +0530
Subject: [PATCH 06/35] add RDS proxy, pre-commit hooks, clean-up code

---
 .pre-commit-config.yaml | 8 ++
 backend/zappa_settings.example.json | 8 +-
 infrastructure/main.tf | 39 ++++--
 infrastructure/modules/cache/main.tf | 25 +++-
 infrastructure/modules/database/main.tf | 135 ++++++++++++++++---
 infrastructure/modules/database/outputs.tf | 6 +-
 infrastructure/modules/database/variables.tf | 17 +++
 infrastructure/modules/networking/main.tf | 15 +++
 infrastructure/modules/security/main.tf | 46 ++++++-
 infrastructure/modules/security/outputs.tf | 5 +
 infrastructure/modules/storage/main.tf | 16 +++
 infrastructure/outputs.tf | 14 +-
infrastructure/providers.tf | 2 +- infrastructure/variables.tf | 22 +++ 14 files changed, 317 insertions(+), 41 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 999a1c0649..de0fa8008e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -9,6 +9,14 @@ repos: - --strict exclude: (.github|pnpm-lock.yaml) + - repo: https://github.com/antonbabenko/pre-commit-terraform + rev: v1.92.0 + hooks: + - id: terraform_fmt + files: ^infrastructure/.*\.tf$ + - id: terraform_tflint + files: ^infrastructure/.*\.tf$ + - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.14.0 hooks: diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json index d05f68a6fa..e20dd34623 100644 --- a/backend/zappa_settings.example.json +++ b/backend/zappa_settings.example.json @@ -28,8 +28,12 @@ "s3_bucket": "${ZAPPA_S3_BUCKET}", "slim_handler": true, "vpc_config": { - "SecurityGroupIds": ["${VPC_SECURITY_GROUP}"], - "SubnetIds": ["${VPC_SUBNET_A}", "${VPC_SUBNET_B}"] + "SecurityGroupIds": ["${AWS_VPC_SECURITY_GROUP}"], + "SubnetIds": [ + "${AWS_VPC_SUBNET_A}", + "${AWS_VPC_SUBNET_B}", + "${AWS_VPC_SUBNET_C}" + ] } } } diff --git a/infrastructure/main.tf b/infrastructure/main.tf index d75ccbd75e..ed07af56a8 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + module "networking" { source = "./modules/networking" @@ -32,16 +47,19 @@ module "storage" { module "database" { source = "./modules/database" - db_allocated_storage = var.db_allocated_storage - db_engine_version = var.db_engine_version - db_instance_class = var.db_instance_class - db_name = var.db_name - db_password = var.db_password - db_username = var.db_username - db_subnet_ids = module.networking.private_subnet_ids - 
security_group_ids = [module.security.rds_sg_id] - project_name = var.project_name - environment = var.environment + db_allocated_storage = var.db_allocated_storage + db_engine_version = var.db_engine_version + db_instance_class = var.db_instance_class + db_name = var.db_name + db_password = var.db_password + db_username = var.db_username + db_storage_type = var.db_storage_type + db_backup_retention_period = var.db_backup_retention_period + db_subnet_ids = module.networking.private_subnet_ids + security_group_ids = [module.security.rds_sg_id] + proxy_security_group_ids = [module.security.rds_proxy_sg_id] + project_name = var.project_name + environment = var.environment } module "cache" { @@ -51,6 +69,7 @@ module "cache" { redis_node_type = var.redis_node_type redis_num_cache_nodes = var.redis_num_cache_nodes redis_port = var.redis_port + redis_auth_token = var.redis_auth_token subnet_ids = module.networking.private_subnet_ids security_group_ids = [module.security.redis_sg_id] project_name = var.project_name diff --git a/infrastructure/modules/cache/main.tf b/infrastructure/modules/cache/main.tf index 0807827f2c..020f1f721b 100644 --- a/infrastructure/modules/cache/main.tf +++ b/infrastructure/modules/cache/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + # ElastiCache Subnet Group resource "aws_elasticache_subnet_group" "main" { name = "${var.project_name}-${var.environment}-cache-subnet-group" @@ -20,14 +35,14 @@ resource "random_password" "redis_auth_token" { # ElastiCache Redis Replication Group resource "aws_elasticache_replication_group" "main" { - replication_group_id = "${var.project_name}-${var.environment}-cache" - description = "${var.project_name} ${var.environment} Redis cache" + replication_group_id = "${var.project_name}-${var.environment}-cache" + description = 
"${var.project_name} ${var.environment} Redis cache" engine = "redis" engine_version = var.redis_engine_version node_type = var.redis_node_type port = var.redis_port - parameter_group_name = "default.redis7" + parameter_group_name = "default.redis${split(".", var.redis_engine_version)[0]}" # Cluster configuration num_cache_clusters = var.redis_num_cache_nodes @@ -43,8 +58,8 @@ resource "aws_elasticache_replication_group" "main" { # Maintenance and backups snapshot_retention_limit = 5 - snapshot_window = "03:00-05:00" - maintenance_window = "mon:05:00-mon:07:00" + snapshot_window = "03:00-05:00" + maintenance_window = "mon:05:00-mon:07:00" # Automatic failover (requires at least 2 nodes) automatic_failover_enabled = var.redis_num_cache_nodes > 1 diff --git a/infrastructure/modules/database/main.tf b/infrastructure/modules/database/main.tf index a52574f8e3..8f695ec0c4 100644 --- a/infrastructure/modules/database/main.tf +++ b/infrastructure/modules/database/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + # DB Subnet Group resource "aws_db_subnet_group" "main" { name = "${var.project_name}-${var.environment}-db-subnet-group" @@ -20,13 +35,13 @@ resource "random_password" "db_password" { # RDS PostgreSQL Instance resource "aws_db_instance" "main" { - identifier = "${var.project_name}-${var.environment}-db" - engine = "postgres" - engine_version = var.db_engine_version - instance_class = var.db_instance_class - allocated_storage = var.db_allocated_storage - storage_type = "gp3" - storage_encrypted = true + identifier = lower("${var.project_name}-${var.environment}-db") + engine = "postgres" + engine_version = var.db_engine_version + instance_class = var.db_instance_class + allocated_storage = var.db_allocated_storage + storage_type = var.db_storage_type + storage_encrypted = true db_name = 
var.db_name username = var.db_username @@ -36,14 +51,13 @@ resource "aws_db_instance" "main" { vpc_security_group_ids = var.security_group_ids publicly_accessible = false - backup_retention_period = 7 - backup_window = "03:00-04:00" - maintenance_window = "mon:04:00-mon:05:00" + backup_retention_period = var.db_backup_retention_period + backup_window = "03:00-04:00" + maintenance_window = "mon:04:00-mon:05:00" # Enable automated backups - skip_final_snapshot = true - final_snapshot_identifier = "${var.project_name}-${var.environment}-final-snapshot-${formatdate("YYYY-MM-DD-hhmm", timestamp())}" - copy_tags_to_snapshot = true + skip_final_snapshot = true + copy_tags_to_snapshot = true # Performance Insights enabled_cloudwatch_logs_exports = ["postgresql", "upgrade"] @@ -51,10 +65,99 @@ resource "aws_db_instance" "main" { tags = { Name = "${var.project_name}-${var.environment}-postgres" } +} + +# Secrets Manager Secret for DB Credentials +resource "aws_secretsmanager_secret" "db_credentials" { + name = "${var.project_name}-${var.environment}-db-credentials" + tags = { + Name = "${var.project_name}-${var.environment}-db-credentials" + } + recovery_window_in_days = 0 +} + +resource "aws_secretsmanager_secret_version" "db_credentials" { + secret_id = aws_secretsmanager_secret.db_credentials.id + secret_string = jsonencode({ + username = var.db_username + password = var.db_password != null && var.db_password != "" ? 
var.db_password : random_password.db_password[0].result + }) +} + +# IAM Role for RDS Proxy +resource "aws_iam_role" "rds_proxy" { + name = "${var.project_name}-${var.environment}-rds-proxy-role" + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "rds.amazonaws.com" + } + } + ] + }) + tags = { + Name = "${var.project_name}-${var.environment}-rds-proxy-role" + } +} - lifecycle { - ignore_changes = [ - final_snapshot_identifier +resource "aws_iam_role_policy" "rds_proxy" { + name = "${var.project_name}-${var.environment}-rds-proxy-policy" + role = aws_iam_role.rds_proxy.id + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "secretsmanager:GetSecretValue" + ] + Effect = "Allow" + Resource = aws_secretsmanager_secret.db_credentials.arn + } ] + }) +} + +# RDS Proxy +resource "aws_db_proxy" "main" { + name = "${var.project_name}-${var.environment}-proxy" + debug_logging = false + engine_family = "POSTGRESQL" + idle_client_timeout = 1800 + require_tls = true + role_arn = aws_iam_role.rds_proxy.arn + vpc_security_group_ids = var.proxy_security_group_ids + vpc_subnet_ids = var.db_subnet_ids + + auth { + auth_scheme = "SECRETS" + description = "Database credentials" + iam_auth = "DISABLED" + secret_arn = aws_secretsmanager_secret.db_credentials.arn + } + + tags = { + Name = "${var.project_name}-${var.environment}-rds-proxy" } } + +# RDS Proxy Default Target Group +resource "aws_db_proxy_default_target_group" "main" { + db_proxy_name = aws_db_proxy.main.name + + connection_pool_config { + connection_borrow_timeout = 120 + max_connections_percent = 100 + max_idle_connections_percent = 50 + } +} + +# RDS Proxy Target +resource "aws_db_proxy_target" "main" { + db_instance_identifier = aws_db_instance.main.identifier + db_proxy_name = aws_db_proxy.main.name + target_group_name = aws_db_proxy_default_target_group.main.name +} diff --git 
a/infrastructure/modules/database/outputs.tf b/infrastructure/modules/database/outputs.tf index 339eecd8a0..3e62f2e0cc 100644 --- a/infrastructure/modules/database/outputs.tf +++ b/infrastructure/modules/database/outputs.tf @@ -1,6 +1,6 @@ -output "db_instance_endpoint" { - description = "The endpoint of the RDS instance" - value = aws_db_instance.main.endpoint +output "db_proxy_endpoint" { + description = "The endpoint of the RDS proxy" + value = aws_db_proxy.main.endpoint } output "db_password" { diff --git a/infrastructure/modules/database/variables.tf b/infrastructure/modules/database/variables.tf index eb878381cf..b9dba03f95 100644 --- a/infrastructure/modules/database/variables.tf +++ b/infrastructure/modules/database/variables.tf @@ -49,3 +49,20 @@ variable "security_group_ids" { description = "A list of security group IDs to associate with the RDS database" type = list(string) } + +variable "db_storage_type" { + description = "The storage type for the RDS database" + type = string + default = "gp3" +} + +variable "db_backup_retention_period" { + description = "The number of days to retain backups for" + type = number + default = 7 +} + +variable "proxy_security_group_ids" { + description = "A list of security group IDs to associate with the RDS proxy" + type = list(string) +} diff --git a/infrastructure/modules/networking/main.tf b/infrastructure/modules/networking/main.tf index 3c46147394..7a105d214c 100644 --- a/infrastructure/modules/networking/main.tf +++ b/infrastructure/modules/networking/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + # VPC resource "aws_vpc" "main" { cidr_block = var.vpc_cidr diff --git a/infrastructure/modules/security/main.tf b/infrastructure/modules/security/main.tf index 58cf9e193c..f8e224f588 100644 --- a/infrastructure/modules/security/main.tf 
+++ b/infrastructure/modules/security/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + # Lambda Security Group resource "aws_security_group" "lambda" { name = "${var.project_name}-${var.environment}-lambda-sg" @@ -17,6 +32,33 @@ resource "aws_security_group" "lambda" { } } +# RDS Proxy Security Group +resource "aws_security_group" "rds_proxy" { + name = "${var.project_name}-${var.environment}-rds-proxy-sg" + description = "Security group for RDS Proxy" + vpc_id = var.vpc_id + + ingress { + description = "PostgreSQL from Lambda" + from_port = var.db_port + to_port = var.db_port + protocol = "tcp" + security_groups = [aws_security_group.lambda.id] + } + + egress { + description = "Allow all outbound traffic" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = { + Name = "${var.project_name}-${var.environment}-rds-proxy-sg" + } +} + # RDS Security Group resource "aws_security_group" "rds" { name = "${var.project_name}-${var.environment}-rds-sg" @@ -24,11 +66,11 @@ resource "aws_security_group" "rds" { vpc_id = var.vpc_id ingress { - description = "PostgreSQL from Lambda" + description = "PostgreSQL from RDS Proxy" from_port = var.db_port to_port = var.db_port protocol = "tcp" - security_groups = [aws_security_group.lambda.id] + security_groups = [aws_security_group.rds_proxy.id] } egress { diff --git a/infrastructure/modules/security/outputs.tf b/infrastructure/modules/security/outputs.tf index adbf50a498..0c5d68ae4e 100644 --- a/infrastructure/modules/security/outputs.tf +++ b/infrastructure/modules/security/outputs.tf @@ -12,3 +12,8 @@ output "redis_sg_id" { description = "The ID of the Redis security group" value = aws_security_group.redis.id } + +output "rds_proxy_sg_id" { + description = "The ID of the RDS proxy security group" + value = 
aws_security_group.rds_proxy.id +} diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 6e8d4ef5e1..15793e23bd 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -1,3 +1,18 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + random = { + source = "hashicorp/random" + version = "~> 3.0" + } + } +} + # S3 Bucket for Zappa Deployments resource "aws_s3_bucket" "zappa" { bucket = var.zappa_s3_bucket @@ -5,6 +20,7 @@ resource "aws_s3_bucket" "zappa" { tags = { Name = "${var.project_name}-${var.environment}-zappa-deployments" } + force_destroy = true } # Block public access diff --git a/infrastructure/outputs.tf b/infrastructure/outputs.tf index 989898a4c0..49aaec3167 100644 --- a/infrastructure/outputs.tf +++ b/infrastructure/outputs.tf @@ -1,6 +1,6 @@ output "database_endpoint" { - description = "The endpoint of the RDS database" - value = module.database.db_instance_endpoint + description = "The endpoint of the RDS proxy" + value = module.database.db_proxy_endpoint } output "redis_endpoint" { @@ -19,3 +19,13 @@ output "redis_auth_token" { value = module.cache.redis_auth_token sensitive = true } + +output "private_subnet_ids" { + description = "A list of private subnet IDs" + value = module.networking.private_subnet_ids +} + +output "lambda_security_group_id" { + description = "The ID of the security group for the Lambda function" + value = module.security.lambda_sg_id +} diff --git a/infrastructure/providers.tf b/infrastructure/providers.tf index 3aafc50044..c9d7ccbdea 100644 --- a/infrastructure/providers.tf +++ b/infrastructure/providers.tf @@ -1,3 +1,3 @@ provider "aws" { - region = "ap-south-1" + region = var.aws_region } diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf index 2fd0dbb5a3..e45b9dd139 100644 --- a/infrastructure/variables.tf +++ 
b/infrastructure/variables.tf @@ -1,3 +1,9 @@ +variable "aws_region" { + description = "The AWS region to deploy resources in." + type = string + default = "us-east-1" +} + variable "availability_zones" { description = "A list of availability zones for the VPC" type = list(string) @@ -51,6 +57,10 @@ variable "environment" { description = "The environment (e.g., staging, production)" type = string default = "staging" + validation { + condition = contains(["staging", "production"], var.environment) + error_message = "Environment must be either 'staging' or 'production'." + } } variable "private_subnet_cidrs" { @@ -113,3 +123,15 @@ variable "zappa_s3_bucket" { type = string default = "nest-zappa-deployments" } + +variable "db_storage_type" { + description = "The storage type for the RDS database" + type = string + default = "gp3" +} + +variable "db_backup_retention_period" { + description = "The number of days to retain backups for" + type = number + default = 7 +} From e61880833bfeee64f5c20184d63b2d6979e03d70 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 12:32:44 +0530 Subject: [PATCH 07/35] feat: add ecs tasks --- .gitignore | 2 + infrastructure/main.tf | 28 ++ infrastructure/modules/ecs/main.tf | 573 ++++++++++++++++++++++++ infrastructure/modules/ecs/outputs.tf | 9 + infrastructure/modules/ecs/variables.tf | 215 +++++++++ infrastructure/outputs.tf | 5 + infrastructure/variables.tf | 119 +++++ 7 files changed, 951 insertions(+) create mode 100644 infrastructure/modules/ecs/main.tf create mode 100644 infrastructure/modules/ecs/outputs.tf create mode 100644 infrastructure/modules/ecs/variables.tf diff --git a/.gitignore b/.gitignore index ea7b351153..f38c72cf9f 100644 --- a/.gitignore +++ b/.gitignore @@ -27,6 +27,8 @@ __pycache__ *.log *.pdf *.pem +backend/*nest-backend-dev*.zip +backend/*nest-backend-dev*.tar.gz backend/*nest-backend-staging*.zip backend/*nest-backend-staging*.tar.gz backend/data/backup* diff --git 
a/infrastructure/main.tf b/infrastructure/main.tf index ed07af56a8..9cd4a9d509 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -75,3 +75,31 @@ module "cache" { project_name = var.project_name environment = var.environment } + +module "ecs" { + source = "./modules/ecs" + + project_name = var.project_name + environment = var.environment + aws_region = var.aws_region + private_subnet_ids = module.networking.private_subnet_ids + lambda_sg_id = module.security.lambda_sg_id + django_algolia_application_id = var.django_algolia_application_id + django_algolia_write_api_key = var.django_algolia_write_api_key + django_allowed_hosts = var.django_allowed_hosts + django_aws_access_key_id = var.django_aws_access_key_id + django_aws_secret_access_key = var.django_aws_secret_access_key + django_configuration = var.django_configuration + django_db_host = var.django_db_host + django_db_name = var.django_db_name + django_db_user = var.django_db_user + django_db_port = var.django_db_port + django_db_password = var.django_db_password + django_open_ai_secret_key = var.django_open_ai_secret_key + django_redis_host = var.django_redis_host + django_redis_password = var.django_redis_password + django_secret_key = var.django_secret_key + django_sentry_dsn = var.django_sentry_dsn + django_slack_bot_token = var.django_slack_bot_token + django_slack_signing_secret = var.django_slack_signing_secret +} diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf new file mode 100644 index 0000000000..d72a2f9235 --- /dev/null +++ b/infrastructure/modules/ecs/main.tf @@ -0,0 +1,573 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + } +} + +locals { + common_tags = { + Project = var.project_name + Environment = var.environment + ManagedBy = "Terraform" + } +} + +resource "aws_ecs_cluster" "main" { + name = "${var.project_name}-${var.environment}-cluster" + tags = 
local.common_tags
+}
+
+resource "aws_ecr_repository" "main" {
+  name = "${var.project_name}-${var.environment}-backend"
+  tags = local.common_tags
+}
+
+resource "aws_iam_role" "ecs_tasks_execution_role" {
+  name = "${var.project_name}-${var.environment}-ecs-tasks-execution-role"
+  tags = local.common_tags
+
+  assume_role_policy = jsonencode({
+    Version = "2012-10-17"
+    Statement = [
+      {
+        Action = "sts:AssumeRole"
+        Effect = "Allow"
+        Principal = {
+          Service = "ecs-tasks.amazonaws.com"
+        }
+      }
+    ]
+  })
+}
+
+resource "aws_iam_role_policy_attachment" "ecs_tasks_execution_role_policy" {
+  role       = aws_iam_role.ecs_tasks_execution_role.name
+  policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy"
+}
+
+resource "aws_iam_role" "event_bridge_role" {
+  name = "${var.project_name}-${var.environment}-event-bridge-role"
+  tags = local.common_tags
+
+  assume_role_policy = jsonencode({
+    Version = "2012-10-17"
+    Statement = [
+      {
+        Action = "sts:AssumeRole"
+        Effect = "Allow"
+        Principal = {
+          Service = "events.amazonaws.com"
+        }
+      }
+    ]
+  })
+}
+
+resource "aws_iam_role_policy_attachment" "event_bridge_role_policy" {
+  role       = aws_iam_role.event_bridge_role.name
+  policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceEventsRole"
+}
+
+# Task definitions
+resource "aws_ecs_task_definition" "sync_data" {
+  family                   = "${var.project_name}-${var.environment}-sync-data"
+  network_mode             = "awsvpc"
+  requires_compatibilities = ["FARGATE"]
+  cpu                      = var.sync_data_task_cpu
+  memory                   = var.sync_data_task_memory
+  execution_role_arn       = aws_iam_role.ecs_tasks_execution_role.arn
+  tags                     = local.common_tags
+
+  container_definitions = jsonencode([
+    {
+      name      = "backend"
+      image     = aws_ecr_repository.main.repository_url
+      command   = ["python", "manage.py", "sync-data"]
+      essential = true
+      logConfiguration = {
+        logDriver = "awslogs"
+        options = {
+          "awslogs-group"  = aws_cloudwatch_log_group.sync_data.name
+          "awslogs-region" = var.aws_region
+          
"awslogs-stream-prefix" = "ecs" + } + } + } + ]) +} + +resource "aws_cloudwatch_log_group" "sync_data" { + name = "/ecs/${var.project_name}-${var.environment}-sync-data" + tags = local.common_tags +} + +resource "aws_cloudwatch_event_rule" "sync_data" { + name = "${var.project_name}-${var.environment}-sync-data-rule" + description = "Fires daily to trigger the sync-data task" + schedule_expression = "cron(17 05 * * ? *)" + tags = local.common_tags +} + +resource "aws_cloudwatch_event_target" "sync_data" { + rule = aws_cloudwatch_event_rule.sync_data.name + target_id = "${var.project_name}-${var.environment}-sync-data-target" + arn = aws_ecs_cluster.main.arn + + ecs_target { + task_definition_arn = aws_ecs_task_definition.sync_data.arn + launch_type = "FARGATE" + network_configuration { + subnets = var.private_subnet_ids + security_groups = [var.lambda_sg_id] + assign_public_ip = false + } + } + + role_arn = aws_iam_role.event_bridge_role.arn +} + +resource "aws_ecs_task_definition" "owasp_update_project_health_metrics" { + family = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = var.update_project_health_metrics_task_cpu + memory = var.update_project_health_metrics_task_memory + execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + tags = local.common_tags + + container_definitions = jsonencode([ + { + name = "backend" + image = aws_ecr_repository.main.repository_url + command = ["/bin/sh", "-c", "python manage.py owasp-update-project-health-requirements && python manage.py owasp-update-project-health-metrics"] + essential = true + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = aws_cloudwatch_log_group.owasp_update_project_health_metrics.name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "ecs" + } + } + } + ]) +} + +resource "aws_cloudwatch_log_group" "owasp_update_project_health_metrics" { + name = 
"/ecs/${var.project_name}-${var.environment}-owasp-update-project-health-metrics" + tags = local.common_tags +} + +resource "aws_cloudwatch_event_rule" "owasp_update_project_health_metrics" { + name = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics-rule" + description = "Fires daily to trigger the owasp-update-project-health-metrics task" + schedule_expression = "cron(17 17 * * ? *)" + tags = local.common_tags +} + +resource "aws_cloudwatch_event_target" "owasp_update_project_health_metrics" { + rule = aws_cloudwatch_event_rule.owasp_update_project_health_metrics.name + target_id = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics-target" + arn = aws_ecs_cluster.main.arn + + ecs_target { + task_definition_arn = aws_ecs_task_definition.owasp_update_project_health_metrics.arn + launch_type = "FARGATE" + network_configuration { + subnets = var.private_subnet_ids + security_groups = [var.lambda_sg_id] + assign_public_ip = false + } + } + + role_arn = aws_iam_role.event_bridge_role.arn +} + +resource "aws_ecs_task_definition" "owasp_update_project_health_scores" { + family = "${var.project_name}-${var.environment}-owasp-update-project-health-scores" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = var.update_project_health_scores_task_cpu + memory = var.update_project_health_scores_task_memory + execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + tags = local.common_tags + + container_definitions = jsonencode([ + { + name = "backend" + image = aws_ecr_repository.main.repository_url + command = ["python", "manage.py", "owasp-update-project-health-scores"] + essential = true + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = aws_cloudwatch_log_group.owasp_update_project_health_scores.name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "ecs" + } + } + } + ]) +} + +resource "aws_cloudwatch_log_group" "owasp_update_project_health_scores" 
{
+  name = "/ecs/${var.project_name}-${var.environment}-owasp-update-project-health-scores"
+  tags = local.common_tags
+}
+
+resource "aws_cloudwatch_event_rule" "owasp_update_project_health_scores" {
+  name                = "${var.project_name}-${var.environment}-owasp-update-project-health-scores-rule"
+  description         = "Fires daily to trigger the owasp-update-project-health-scores task"
+  schedule_expression = "cron(22 17 * * ? *)"
+  tags                = local.common_tags
+}
+
+resource "aws_cloudwatch_event_target" "owasp_update_project_health_scores" {
+  rule      = aws_cloudwatch_event_rule.owasp_update_project_health_scores.name
+  target_id = "${var.project_name}-${var.environment}-owasp-update-project-health-scores-target"
+  arn       = aws_ecs_cluster.main.arn
+
+  ecs_target {
+    task_definition_arn = aws_ecs_task_definition.owasp_update_project_health_scores.arn
+    launch_type         = "FARGATE"
+    network_configuration {
+      subnets          = var.private_subnet_ids
+      security_groups  = [var.lambda_sg_id]
+      assign_public_ip = false
+    }
+  }
+
+  role_arn = aws_iam_role.event_bridge_role.arn
+}
+
+# One time tasks
+resource "aws_ecs_task_definition" "migrate" {
+  family                   = "${var.project_name}-${var.environment}-migrate"
+  network_mode             = "awsvpc"
+  requires_compatibilities = ["FARGATE"]
+  cpu                      = var.migrate_task_cpu
+  memory                   = var.migrate_task_memory
+  execution_role_arn       = aws_iam_role.ecs_tasks_execution_role.arn
+  tags                     = local.common_tags
+
+  container_definitions = jsonencode([
+    {
+      name      = "backend"
+      image     = "${aws_ecr_repository.main.repository_url}:latest"
+      command   = ["python", "manage.py", "migrate"]
+      essential = true
+      logConfiguration = {
+        logDriver = "awslogs"
+        options = {
+          "awslogs-group"         = aws_cloudwatch_log_group.migrate.name
+          "awslogs-region"        = var.aws_region
+          "awslogs-stream-prefix" = "ecs"
+        }
+      }
+      environment = [
+        {
+          name  = "DJANGO_ALGOLIA_APPLICATION_ID"
+          value = var.django_algolia_application_id
+        },
+        {
+          name  = "DJANGO_ALGOLIA_WRITE_API_KEY"
+          value = var.django_algolia_write_api_key
+        
}, + { + name = "DJANGO_ALLOWED_HOSTS" + value = var.django_allowed_hosts + }, + { + name = "DJANGO_AWS_ACCESS_KEY_ID" + value = var.django_aws_access_key_id + }, + { + name = "DJANGO_AWS_SECRET_ACCESS_KEY" + value = var.django_aws_secret_access_key + }, + { + name = "DJANGO_CONFIGURATION" + value = var.django_configuration + }, + { + name = "DJANGO_DB_HOST" + value = var.django_db_host + }, + { + name = "DJANGO_DB_NAME" + value = var.django_db_name + }, + { + name = "DJANGO_DB_USER" + value = var.django_db_user + }, + { + name = "DJANGO_DB_PORT" + value = var.django_db_port + }, + { + name = "DJANGO_DB_PASSWORD" + value = var.django_db_password + }, + { + name = "DJANGO_OPEN_AI_SECRET_KEY" + value = var.django_open_ai_secret_key + }, + { + name = "DJANGO_REDIS_HOST" + value = var.django_redis_host + }, + { + name = "DJANGO_REDIS_PASSWORD" + value = var.django_redis_password + }, + { + name = "DJANGO_SECRET_KEY" + value = var.django_secret_key + }, + { + name = "DJANGO_SENTRY_DSN" + value = var.django_sentry_dsn + }, + { + name = "DJANGO_SLACK_BOT_TOKEN" + value = var.django_slack_bot_token + }, + { + name = "DJANGO_SLACK_SIGNING_SECRET" + value = var.django_slack_signing_secret + } + ] + } + ]) +} + +resource "aws_cloudwatch_log_group" "migrate" { + name = "/ecs/${var.project_name}-${var.environment}-migrate" + tags = local.common_tags +} + +resource "aws_ecs_task_definition" "load_data" { + family = "${var.project_name}-${var.environment}-load-data" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = var.load_data_task_cpu + memory = var.load_data_task_memory + execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + tags = local.common_tags + + container_definitions = jsonencode([ + { + name = "backend" + image = "${aws_ecr_repository.main.repository_url}:latest" + command = ["python", "manage.py", "load_data"] + essential = true + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = 
aws_cloudwatch_log_group.load_data.name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "ecs" + } + } + environment = [ + { + name = "DJANGO_ALGOLIA_APPLICATION_ID" + value = var.django_algolia_application_id + }, + { + name = "DJANGO_ALGOLIA_WRITE_API_KEY" + value = var.django_algolia_write_api_key + }, + { + name = "DJANGO_ALLOWED_HOSTS" + value = var.django_allowed_hosts + }, + { + name = "DJANGO_AWS_ACCESS_KEY_ID" + value = var.django_aws_access_key_id + }, + { + name = "DJANGO_AWS_SECRET_ACCESS_KEY" + value = var.django_aws_secret_access_key + }, + { + name = "DJANGO_CONFIGURATION" + value = var.django_configuration + }, + { + name = "DJANGO_DB_HOST" + value = var.django_db_host + }, + { + name = "DJANGO_DB_NAME" + value = var.django_db_name + }, + { + name = "DJANGO_DB_USER" + value = var.django_db_user + }, + { + name = "DJANGO_DB_PORT" + value = var.django_db_port + }, + { + name = "DJANGO_DB_PASSWORD" + value = var.django_db_password + }, + { + name = "DJANGO_OPEN_AI_SECRET_KEY" + value = var.django_open_ai_secret_key + }, + { + name = "DJANGO_REDIS_HOST" + value = var.django_redis_host + }, + { + name = "DJANGO_REDIS_PASSWORD" + value = var.django_redis_password + }, + { + name = "DJANGO_SECRET_KEY" + value = var.django_secret_key + }, + { + name = "DJANGO_SENTRY_DSN" + value = var.django_sentry_dsn + }, + { + name = "DJANGO_SLACK_BOT_TOKEN" + value = var.django_slack_bot_token + }, + { + name = "DJANGO_SLACK_SIGNING_SECRET" + value = var.django_slack_signing_secret + } + ] + } + ]) +} + +resource "aws_cloudwatch_log_group" "load_data" { + name = "/ecs/${var.project_name}-${var.environment}-load-data" + tags = local.common_tags +} + +resource "aws_ecs_task_definition" "index_data" { + family = "${var.project_name}-${var.environment}-index-data" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = var.index_data_task_cpu + memory = var.index_data_task_memory + execution_role_arn = 
aws_iam_role.ecs_tasks_execution_role.arn + tags = local.common_tags + + container_definitions = jsonencode([ + { + name = "backend" + image = "${aws_ecr_repository.main.repository_url}:latest" + command = [ + "/bin/sh", + "-c", + <<-EOT + python manage.py algolia_reindex + python manage.py algolia_update_replicas + python manage.py algolia_update_synonyms + EOT + ] + essential = true + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = aws_cloudwatch_log_group.load_data.name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "ecs" + } + } + environment = [ + { + name = "DJANGO_ALGOLIA_APPLICATION_ID" + value = var.django_algolia_application_id + }, + { + name = "DJANGO_ALGOLIA_WRITE_API_KEY" + value = var.django_algolia_write_api_key + }, + { + name = "DJANGO_ALLOWED_HOSTS" + value = var.django_allowed_hosts + }, + { + name = "DJANGO_AWS_ACCESS_KEY_ID" + value = var.django_aws_access_key_id + }, + { + name = "DJANGO_AWS_SECRET_ACCESS_KEY" + value = var.django_aws_secret_access_key + }, + { + name = "DJANGO_CONFIGURATION" + value = var.django_configuration + }, + { + name = "DJANGO_DB_HOST" + value = var.django_db_host + }, + { + name = "DJANGO_DB_NAME" + value = var.django_db_name + }, + { + name = "DJANGO_DB_USER" + value = var.django_db_user + }, + { + name = "DJANGO_DB_PORT" + value = var.django_db_port + }, + { + name = "DJANGO_DB_PASSWORD" + value = var.django_db_password + }, + { + name = "DJANGO_OPEN_AI_SECRET_KEY" + value = var.django_open_ai_secret_key + }, + { + name = "DJANGO_REDIS_HOST" + value = var.django_redis_host + }, + { + name = "DJANGO_REDIS_PASSWORD" + value = var.django_redis_password + }, + { + name = "DJANGO_SECRET_KEY" + value = var.django_secret_key + }, + { + name = "DJANGO_SENTRY_DSN" + value = var.django_sentry_dsn + }, + { + name = "DJANGO_SLACK_BOT_TOKEN" + value = var.django_slack_bot_token + }, + { + name = "DJANGO_SLACK_SIGNING_SECRET" + value = var.django_slack_signing_secret + } + ] + } + 
]) +} + +resource "aws_cloudwatch_log_group" "index_data" { + name = "/ecs/${var.project_name}-${var.environment}-index-data" + tags = local.common_tags +} diff --git a/infrastructure/modules/ecs/outputs.tf b/infrastructure/modules/ecs/outputs.tf new file mode 100644 index 0000000000..acb7cd7fdc --- /dev/null +++ b/infrastructure/modules/ecs/outputs.tf @@ -0,0 +1,9 @@ +output "ecs_cluster_arn" { + description = "The ARN of the ECS cluster" + value = aws_ecs_cluster.main.arn +} + +output "ecr_repository_url" { + description = "The URL of the ECR repository" + value = aws_ecr_repository.main.repository_url +} diff --git a/infrastructure/modules/ecs/variables.tf b/infrastructure/modules/ecs/variables.tf new file mode 100644 index 0000000000..53eb5b4bf4 --- /dev/null +++ b/infrastructure/modules/ecs/variables.tf @@ -0,0 +1,215 @@ +variable "project_name" { + description = "The name of the project" + type = string +} + +variable "environment" { + description = "The environment (e.g., staging, production)" + type = string +} + +variable "aws_region" { + description = "The AWS region" + type = string +} + +variable "private_subnet_ids" { + description = "A list of private subnet IDs" + type = list(string) +} + +variable "lambda_sg_id" { + description = "The ID of the security group for the Lambda function" + type = string +} + +variable "sync_data_task_cpu" { + description = "The CPU for the sync-data task" + type = string + default = "256" +} + +variable "sync_data_task_memory" { + description = "The memory for the sync-data task" + type = string + default = "512" +} + +variable "update_project_health_metrics_task_cpu" { + description = "The CPU for the update-project-health-metrics task" + type = string + default = "256" +} + +variable "update_project_health_metrics_task_memory" { + description = "The memory for the update-project-health-metrics task" + type = string + default = "512" +} + +variable "update_project_health_scores_task_cpu" { + description = "The CPU for 
the update-project-health-scores task" + type = string + default = "256" +} + +variable "update_project_health_scores_task_memory" { + description = "The memory for the update-project-health-scores task" + type = string + default = "512" +} + +# One time tasks +variable "migrate_task_cpu" { + description = "The CPU for the load-data task" + type = string + default = "256" +} + +variable "migrate_task_memory" { + description = "The memory for the load-data task" + type = string + default = "2048" +} + +variable "load_data_task_cpu" { + description = "The CPU for the load-data task" + type = string + default = "256" +} + +variable "load_data_task_memory" { + description = "The memory for the load-data task" + type = string + default = "2048" +} + +variable "index_data_task_cpu" { + description = "The CPU for the index-data task" + type = string + default = "256" +} + +variable "index_data_task_memory" { + description = "The memory for the index-data task" + type = string + default = "2048" +} +# Environment Variables (temporary) +variable "django_algolia_application_id" { + type = string + description = "Algolia application ID." + default = null +} + +variable "django_allowed_hosts" { + type = string + description = "Comma-separated list of allowed hosts for Django." + default = null +} + +variable "django_db_host" { + type = string + description = "Database host URL." + default = null +} + +variable "django_db_name" { + type = string + description = "Database name." + default = null +} + +variable "django_db_user" { + type = string + description = "Database user." + default = null +} + +variable "django_db_port" { + type = string + description = "Database port." + default = null +} + +variable "django_redis_host" { + type = string + description = "Redis host URL." + default = null +} + +variable "django_algolia_write_api_key" { + type = string + description = "Algolia write API key." 
+ sensitive = true + default = null +} + +variable "django_aws_access_key_id" { + type = string + description = "AWS access key for Django." + sensitive = true + default = null +} + +variable "django_aws_secret_access_key" { + type = string + description = "AWS secret access key for Django." + sensitive = true + default = null +} + +variable "django_configuration" { + type = string + description = "Django Configuration" + default = null +} + +variable "django_db_password" { + type = string + description = "Database password." + sensitive = true + default = null +} + +variable "django_open_ai_secret_key" { + type = string + description = "OpenAI secret key." + sensitive = true + default = null +} + +variable "django_redis_password" { + type = string + description = "Redis password." + sensitive = true + default = null +} + +variable "django_secret_key" { + type = string + description = "Django secret key." + sensitive = true + default = null +} + +variable "django_sentry_dsn" { + type = string + description = "Sentry DSN for error tracking." + sensitive = true + default = null +} + +variable "django_slack_bot_token" { + type = string + description = "Slack bot token." + sensitive = true + default = null +} + +variable "django_slack_signing_secret" { + type = string + description = "Slack signing secret." 
+ sensitive = true + default = null +} diff --git a/infrastructure/outputs.tf b/infrastructure/outputs.tf index 49aaec3167..2c2724cdd2 100644 --- a/infrastructure/outputs.tf +++ b/infrastructure/outputs.tf @@ -29,3 +29,8 @@ output "lambda_security_group_id" { description = "The ID of the security group for the Lambda function" value = module.security.lambda_sg_id } + +output "ecr_repository_url" { + description = "The URL of the ECR repository" + value = module.ecs.ecr_repository_url +} diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf index e45b9dd139..ed17d4c244 100644 --- a/infrastructure/variables.tf +++ b/infrastructure/variables.tf @@ -135,3 +135,122 @@ variable "db_backup_retention_period" { type = number default = 7 } + +# Environment Variables (temporary) +variable "django_algolia_application_id" { + type = string + description = "Algolia application ID." + default = null +} + +variable "django_allowed_hosts" { + type = string + description = "Comma-separated list of allowed hosts for Django." + default = null +} + +variable "django_db_host" { + type = string + description = "Database host URL." + default = null +} + +variable "django_db_name" { + type = string + description = "Database name." + default = null +} + +variable "django_db_user" { + type = string + description = "Database user." + default = null +} + +variable "django_db_port" { + type = string + description = "Database port." + default = null +} + +variable "django_redis_host" { + type = string + description = "Redis host URL." + default = null +} + +variable "django_algolia_write_api_key" { + type = string + description = "Algolia write API key." + sensitive = true + default = null +} + +variable "django_aws_access_key_id" { + type = string + description = "AWS access key for Django." + sensitive = true + default = null +} + +variable "django_aws_secret_access_key" { + type = string + description = "AWS secret access key for Django." 
+ sensitive = true + default = null +} + +variable "django_configuration" { + type = string + description = "Django Configuration" + default = null +} + +variable "django_db_password" { + type = string + description = "Database password." + sensitive = true + default = null +} + +variable "django_open_ai_secret_key" { + type = string + description = "OpenAI secret key." + sensitive = true + default = null +} + +variable "django_redis_password" { + type = string + description = "Redis password." + sensitive = true + default = null +} + +variable "django_secret_key" { + type = string + description = "Django secret key." + sensitive = true + default = null +} + +variable "django_sentry_dsn" { + type = string + description = "Sentry DSN for error tracking." + sensitive = true + default = null +} + +variable "django_slack_bot_token" { + type = string + description = "Slack bot token." + sensitive = true + default = null +} + +variable "django_slack_signing_secret" { + type = string + description = "Slack signing secret." 
+ sensitive = true + default = null +} From 6220c24ea54558714987c3e4e45e9d9ec420b152 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 13:42:44 +0530 Subject: [PATCH 08/35] refactor/clean cache module --- infrastructure/main.tf | 19 ++++-- infrastructure/modules/cache/main.tf | 74 ++++++++++------------- infrastructure/modules/cache/outputs.tf | 10 +-- infrastructure/modules/cache/variables.tf | 30 +++++++++ 4 files changed, 79 insertions(+), 54 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index 9cd4a9d509..ac0c193d1f 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -13,6 +13,14 @@ terraform { } } +locals { + common_tags = { + Environment = var.environment + ManagedBy = "Terraform" + Project = var.project_name + } +} + module "networking" { source = "./modules/networking" @@ -42,8 +50,6 @@ module "storage" { environment = var.environment } - - module "database" { source = "./modules/database" @@ -65,15 +71,16 @@ module "database" { module "cache" { source = "./modules/cache" + common_tags = local.common_tags + environment = var.environment + project_name = var.project_name + redis_auth_token = var.redis_auth_token redis_engine_version = var.redis_engine_version redis_node_type = var.redis_node_type redis_num_cache_nodes = var.redis_num_cache_nodes redis_port = var.redis_port - redis_auth_token = var.redis_auth_token - subnet_ids = module.networking.private_subnet_ids security_group_ids = [module.security.redis_sg_id] - project_name = var.project_name - environment = var.environment + subnet_ids = module.networking.private_subnet_ids } module "ecs" { diff --git a/infrastructure/modules/cache/main.tf b/infrastructure/modules/cache/main.tf index 020f1f721b..5461264471 100644 --- a/infrastructure/modules/cache/main.tf +++ b/infrastructure/modules/cache/main.tf @@ -13,61 +13,49 @@ terraform { } } -# ElastiCache Subnet Group +locals { + generate_redis_auth_token = var.redis_auth_token == null || 
var.redis_auth_token == "" + parameter_group_name = "default.redis${local.redis_major_version}" + redis_auth_token = local.generate_redis_auth_token ? random_password.redis_auth_token[0].result : var.redis_auth_token + redis_major_version = split(".", var.redis_engine_version)[0] +} + resource "aws_elasticache_subnet_group" "main" { name = "${var.project_name}-${var.environment}-cache-subnet-group" subnet_ids = var.subnet_ids - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-cache-subnet-group" - } + }) } -# Random auth token for Redis (if not provided) resource "random_password" "redis_auth_token" { - count = var.redis_auth_token == null || var.redis_auth_token == "" ? 1 : 0 - - length = 32 - special = true - # Redis auth token has specific requirements + count = local.generate_redis_auth_token ? 1 : 0 + length = 32 + # Redis auth token has specific requirements for special characters. override_special = "!&#$^<>-" + special = true } -# ElastiCache Redis Replication Group resource "aws_elasticache_replication_group" "main" { - replication_group_id = "${var.project_name}-${var.environment}-cache" - description = "${var.project_name} ${var.environment} Redis cache" - - engine = "redis" - engine_version = var.redis_engine_version - node_type = var.redis_node_type - port = var.redis_port - parameter_group_name = "default.redis${split(".", var.redis_engine_version)[0]}" - - # Cluster configuration - num_cache_clusters = var.redis_num_cache_nodes - - # Network configuration - subnet_group_name = aws_elasticache_subnet_group.main.name - security_group_ids = var.security_group_ids - - # Security at_rest_encryption_enabled = true - transit_encryption_enabled = true - auth_token = var.redis_auth_token != null && var.redis_auth_token != "" ? 
var.redis_auth_token : random_password.redis_auth_token[0].result - - # Maintenance and backups - snapshot_retention_limit = 5 - snapshot_window = "03:00-05:00" - maintenance_window = "mon:05:00-mon:07:00" - - # Automatic failover (requires at least 2 nodes) + auth_token = local.redis_auth_token + auto_minor_version_upgrade = var.auto_minor_version_upgrade automatic_failover_enabled = var.redis_num_cache_nodes > 1 - - # Enable automatic minor version upgrades - auto_minor_version_upgrade = true - - tags = { + description = "${var.project_name} ${var.environment} Redis cache" + engine = "redis" + engine_version = var.redis_engine_version + maintenance_window = var.maintenance_window + node_type = var.redis_node_type + num_cache_clusters = var.redis_num_cache_nodes + parameter_group_name = local.parameter_group_name + port = var.redis_port + replication_group_id = "${var.project_name}-${var.environment}-cache" + security_group_ids = var.security_group_ids + snapshot_retention_limit = var.snapshot_retention_limit + snapshot_window = var.snapshot_window + subnet_group_name = aws_elasticache_subnet_group.main.name + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-redis" - } + }) + transit_encryption_enabled = true } diff --git a/infrastructure/modules/cache/outputs.tf b/infrastructure/modules/cache/outputs.tf index a15d4c8a15..f1582444ff 100644 --- a/infrastructure/modules/cache/outputs.tf +++ b/infrastructure/modules/cache/outputs.tf @@ -1,10 +1,10 @@ -output "redis_primary_endpoint" { - description = "The primary endpoint of the Redis replication group" - value = aws_elasticache_replication_group.main.primary_endpoint_address -} - output "redis_auth_token" { description = "The auth token for Redis" value = random_password.redis_auth_token[0].result sensitive = true } + +output "redis_primary_endpoint" { + description = "The primary endpoint of the Redis replication group" + value = 
aws_elasticache_replication_group.main.primary_endpoint_address +} diff --git a/infrastructure/modules/cache/variables.tf b/infrastructure/modules/cache/variables.tf index 38810d8424..584b5557fd 100644 --- a/infrastructure/modules/cache/variables.tf +++ b/infrastructure/modules/cache/variables.tf @@ -1,8 +1,26 @@ +variable "auto_minor_version_upgrade" { + description = "Determines whether minor engine upgrades will be applied automatically." + type = bool + default = true +} + +variable "common_tags" { + description = "A map of common tags to apply to all resources." + type = map(string) + default = {} +} + variable "environment" { description = "The environment (e.g., staging, production)" type = string } +variable "maintenance_window" { + description = "The weekly time range for when maintenance on the cache cluster is performed." + type = string + default = "mon:05:00-mon:07:00" +} + variable "project_name" { description = "The name of the project" type = string @@ -40,6 +58,18 @@ variable "security_group_ids" { type = list(string) } +variable "snapshot_retention_limit" { + description = "The number of days for which automatic snapshots are retained." + type = number + default = 5 +} + +variable "snapshot_window" { + description = "The daily time range (in UTC) during which ElastiCache will begin taking a daily snapshot." 
+ type = string + default = "03:00-05:00" +} + variable "subnet_ids" { description = "A list of subnet IDs for the cache subnet group" type = list(string) From 9495dfda942307ab6c65163eb05e56d89e102676 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 17:13:50 +0530 Subject: [PATCH 09/35] refactor/clean database module --- infrastructure/main.tf | 14 +-- infrastructure/modules/database/main.tf | 119 ++++++++----------- infrastructure/modules/database/outputs.tf | 12 +- infrastructure/modules/database/variables.tf | 68 +++++++++-- 4 files changed, 120 insertions(+), 93 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index ac0c193d1f..e0511221ff 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -51,21 +51,21 @@ module "storage" { } module "database" { - source = "./modules/database" - + common_tags = local.common_tags db_allocated_storage = var.db_allocated_storage + db_backup_retention_period = var.db_backup_retention_period db_engine_version = var.db_engine_version db_instance_class = var.db_instance_class db_name = var.db_name db_password = var.db_password - db_username = var.db_username db_storage_type = var.db_storage_type - db_backup_retention_period = var.db_backup_retention_period db_subnet_ids = module.networking.private_subnet_ids - security_group_ids = [module.security.rds_sg_id] - proxy_security_group_ids = [module.security.rds_proxy_sg_id] - project_name = var.project_name + db_username = var.db_username environment = var.environment + project_name = var.project_name + proxy_security_group_ids = [module.security.rds_proxy_sg_id] + security_group_ids = [module.security.rds_sg_id] + source = "./modules/database" } module "cache" { diff --git a/infrastructure/modules/database/main.tf b/infrastructure/modules/database/main.tf index 8f695ec0c4..b74eaaafdc 100644 --- a/infrastructure/modules/database/main.tf +++ b/infrastructure/modules/database/main.tf @@ -13,80 +13,70 @@ terraform { } } -# 
DB Subnet Group +locals { + db_password = local.generate_db_password ? random_password.db_password[0].result : var.db_password + generate_db_password = var.db_password == null || var.db_password == "" +} + resource "aws_db_subnet_group" "main" { name = "${var.project_name}-${var.environment}-db-subnet-group" subnet_ids = var.db_subnet_ids - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-db-subnet-group" - } + }) } -# Random password for RDS (if not provided) resource "random_password" "db_password" { - count = var.db_password == null || var.db_password == "" ? 1 : 0 - - length = 32 - special = true + count = local.generate_db_password ? 1 : 0 + length = 32 # Avoid special characters that might cause issues override_special = "!#$%&*()-_=+[]{}<>:?" + special = true } -# RDS PostgreSQL Instance resource "aws_db_instance" "main" { - identifier = lower("${var.project_name}-${var.environment}-db") - engine = "postgres" - engine_version = var.db_engine_version - instance_class = var.db_instance_class - allocated_storage = var.db_allocated_storage - storage_type = var.db_storage_type - storage_encrypted = true - - db_name = var.db_name - username = var.db_username - password = var.db_password != null && var.db_password != "" ? 
var.db_password : random_password.db_password[0].result - - db_subnet_group_name = aws_db_subnet_group.main.name - vpc_security_group_ids = var.security_group_ids - publicly_accessible = false - - backup_retention_period = var.db_backup_retention_period - backup_window = "03:00-04:00" - maintenance_window = "mon:04:00-mon:05:00" - - # Enable automated backups - skip_final_snapshot = true - copy_tags_to_snapshot = true - - # Performance Insights - enabled_cloudwatch_logs_exports = ["postgresql", "upgrade"] - - tags = { + allocated_storage = var.db_allocated_storage + backup_retention_period = var.db_backup_retention_period + backup_window = var.db_backup_window + copy_tags_to_snapshot = var.db_copy_tags_to_snapshot + db_name = var.db_name + db_subnet_group_name = aws_db_subnet_group.main.name + enabled_cloudwatch_logs_exports = var.db_enabled_cloudwatch_logs_exports + engine = "postgres" + engine_version = var.db_engine_version + identifier = lower("${var.project_name}-${var.environment}-db") + instance_class = var.db_instance_class + maintenance_window = var.db_maintenance_window + password = local.db_password + publicly_accessible = false + skip_final_snapshot = var.db_skip_final_snapshot + storage_encrypted = true + storage_type = var.db_storage_type + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-postgres" - } + }) + username = var.db_username + vpc_security_group_ids = var.security_group_ids } -# Secrets Manager Secret for DB Credentials resource "aws_secretsmanager_secret" "db_credentials" { - name = "${var.project_name}-${var.environment}-db-credentials" - tags = { + description = "Stores the credentials for the RDS database." 
+ name = "${var.project_name}-${var.environment}-db-credentials" + recovery_window_in_days = var.secret_recovery_window_in_days + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-db-credentials" - } - recovery_window_in_days = 0 + }) } resource "aws_secretsmanager_secret_version" "db_credentials" { secret_id = aws_secretsmanager_secret.db_credentials.id secret_string = jsonencode({ username = var.db_username - password = var.db_password != null && var.db_password != "" ? var.db_password : random_password.db_password[0].result + password = local.db_password }) } -# IAM Role for RDS Proxy resource "aws_iam_role" "rds_proxy" { - name = "${var.project_name}-${var.environment}-rds-proxy-role" assume_role_policy = jsonencode({ Version = "2012-10-17" Statement = [ @@ -99,14 +89,14 @@ resource "aws_iam_role" "rds_proxy" { } ] }) - tags = { + name = "${var.project_name}-${var.environment}-rds-proxy-role" + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-rds-proxy-role" - } + }) } resource "aws_iam_role_policy" "rds_proxy" { name = "${var.project_name}-${var.environment}-rds-proxy-policy" - role = aws_iam_role.rds_proxy.id policy = jsonencode({ Version = "2012-10-17" Statement = [ @@ -119,43 +109,38 @@ resource "aws_iam_role_policy" "rds_proxy" { } ] }) + role = aws_iam_role.rds_proxy.id } -# RDS Proxy resource "aws_db_proxy" "main" { - name = "${var.project_name}-${var.environment}-proxy" - debug_logging = false - engine_family = "POSTGRESQL" - idle_client_timeout = 1800 - require_tls = true - role_arn = aws_iam_role.rds_proxy.arn - vpc_security_group_ids = var.proxy_security_group_ids - vpc_subnet_ids = var.db_subnet_ids - auth { auth_scheme = "SECRETS" description = "Database credentials" iam_auth = "DISABLED" secret_arn = aws_secretsmanager_secret.db_credentials.arn } - - tags = { + debug_logging = false + engine_family = "POSTGRESQL" + idle_client_timeout = 1800 + name = 
"${var.project_name}-${var.environment}-proxy" + require_tls = true + role_arn = aws_iam_role.rds_proxy.arn + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-rds-proxy" - } + }) + vpc_security_group_ids = var.proxy_security_group_ids + vpc_subnet_ids = var.db_subnet_ids } -# RDS Proxy Default Target Group resource "aws_db_proxy_default_target_group" "main" { - db_proxy_name = aws_db_proxy.main.name - connection_pool_config { connection_borrow_timeout = 120 max_connections_percent = 100 max_idle_connections_percent = 50 } + db_proxy_name = aws_db_proxy.main.name } -# RDS Proxy Target resource "aws_db_proxy_target" "main" { db_instance_identifier = aws_db_instance.main.identifier db_proxy_name = aws_db_proxy.main.name diff --git a/infrastructure/modules/database/outputs.tf b/infrastructure/modules/database/outputs.tf index 3e62f2e0cc..121c915493 100644 --- a/infrastructure/modules/database/outputs.tf +++ b/infrastructure/modules/database/outputs.tf @@ -1,10 +1,10 @@ -output "db_proxy_endpoint" { - description = "The endpoint of the RDS proxy" - value = aws_db_proxy.main.endpoint -} - output "db_password" { description = "The password for the RDS database" - value = random_password.db_password[0].result + value = local.db_password sensitive = true } + +output "db_proxy_endpoint" { + description = "The endpoint of the RDS proxy" + value = aws_db_proxy.main.endpoint +} diff --git a/infrastructure/modules/database/variables.tf b/infrastructure/modules/database/variables.tf index b9dba03f95..6977cd7d89 100644 --- a/infrastructure/modules/database/variables.tf +++ b/infrastructure/modules/database/variables.tf @@ -1,8 +1,38 @@ +variable "common_tags" { + description = "A map of common tags to apply to all resources." 
+ type = map(string) + default = {} +} + variable "db_allocated_storage" { description = "The allocated storage for the RDS database in GB" type = number } +variable "db_backup_retention_period" { + description = "The number of days to retain backups for" + type = number + default = 7 +} + +variable "db_backup_window" { + description = "The daily time range (in UTC) during which automated backups are created." + type = string + default = "03:00-04:00" +} + +variable "db_copy_tags_to_snapshot" { + description = "Specifies whether to copy all instance tags to snapshots." + type = bool + default = true +} + +variable "db_enabled_cloudwatch_logs_exports" { + description = "List of log types to export to CloudWatch Logs." + type = list(string) + default = ["postgresql", "upgrade"] +} + variable "db_engine_version" { description = "The version of the PostgreSQL engine" type = string @@ -13,6 +43,12 @@ variable "db_instance_class" { type = string } +variable "db_maintenance_window" { + description = "The weekly time range (in UTC) during which system maintenance can occur." + type = string + default = "mon:04:00-mon:05:00" +} + variable "db_name" { description = "The name of the RDS database" type = string @@ -25,6 +61,18 @@ variable "db_password" { default = null } +variable "db_skip_final_snapshot" { + description = "Determines whether a final DB snapshot is created before the DB instance is deleted." 
+ type = bool + default = true +} + +variable "db_storage_type" { + description = "The storage type for the RDS database" + type = string + default = "gp3" +} + variable "db_subnet_ids" { description = "A list of subnet IDs for the DB subnet group" type = list(string) @@ -45,24 +93,18 @@ variable "project_name" { type = string } -variable "security_group_ids" { - description = "A list of security group IDs to associate with the RDS database" +variable "proxy_security_group_ids" { + description = "A list of security group IDs to associate with the RDS proxy" type = list(string) } -variable "db_storage_type" { - description = "The storage type for the RDS database" - type = string - default = "gp3" -} - -variable "db_backup_retention_period" { - description = "The number of days to retain backups for" +variable "secret_recovery_window_in_days" { + description = "The number of days that Secrets Manager waits before it can delete the secret. Set to 0 to delete immediately." type = number - default = 7 + default = 0 } -variable "proxy_security_group_ids" { - description = "A list of security group IDs to associate with the RDS proxy" +variable "security_group_ids" { + description = "A list of security group IDs to associate with the RDS database" type = list(string) } From df72c9a1c96be0220c45d38ca840ee065cef3e3c Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 18:04:29 +0530 Subject: [PATCH 10/35] refactor/clean ecs module --- infrastructure/main.tf | 53 +- infrastructure/modules/ecs/main.tf | 639 ++++-------------- .../modules/ecs/modules/task/main.tf | 80 +++ .../modules/ecs/modules/task/variables.tf | 83 +++ infrastructure/modules/ecs/variables.tf | 196 ++---- 5 files changed, 367 insertions(+), 684 deletions(-) create mode 100644 infrastructure/modules/ecs/modules/task/main.tf create mode 100644 infrastructure/modules/ecs/modules/task/variables.tf diff --git a/infrastructure/main.tf b/infrastructure/main.tf index e0511221ff..6d5d131a1a 
100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -19,6 +19,26 @@ locals { ManagedBy = "Terraform" Project = var.project_name } + django_environment_variables = { + DJANGO_ALGOLIA_APPLICATION_ID = var.django_algolia_application_id + DJANGO_ALGOLIA_WRITE_API_KEY = var.django_algolia_write_api_key + DJANGO_ALLOWED_HOSTS = var.django_allowed_hosts + DJANGO_AWS_ACCESS_KEY_ID = var.django_aws_access_key_id + DJANGO_AWS_SECRET_ACCESS_KEY = var.django_aws_secret_access_key + DJANGO_CONFIGURATION = var.django_configuration + DJANGO_DB_HOST = var.django_db_host + DJANGO_DB_NAME = var.django_db_name + DJANGO_DB_USER = var.django_db_user + DJANGO_DB_PORT = var.django_db_port + DJANGO_DB_PASSWORD = var.django_db_password + DJANGO_OPEN_AI_SECRET_KEY = var.django_open_ai_secret_key + DJANGO_REDIS_HOST = var.django_redis_host + DJANGO_REDIS_PASSWORD = var.django_redis_password + DJANGO_SECRET_KEY = var.django_secret_key + DJANGO_SENTRY_DSN = var.django_sentry_dsn + DJANGO_SLACK_BOT_TOKEN = var.django_slack_bot_token + DJANGO_SLACK_SIGNING_SECRET = var.django_slack_signing_secret + } } module "networking" { @@ -51,6 +71,8 @@ module "storage" { } module "database" { + source = "./modules/database" + common_tags = local.common_tags db_allocated_storage = var.db_allocated_storage db_backup_retention_period = var.db_backup_retention_period @@ -65,7 +87,6 @@ module "database" { project_name = var.project_name proxy_security_group_ids = [module.security.rds_proxy_sg_id] security_group_ids = [module.security.rds_sg_id] - source = "./modules/database" } module "cache" { @@ -86,27 +107,11 @@ module "cache" { module "ecs" { source = "./modules/ecs" - project_name = var.project_name - environment = var.environment - aws_region = var.aws_region - private_subnet_ids = module.networking.private_subnet_ids - lambda_sg_id = module.security.lambda_sg_id - django_algolia_application_id = var.django_algolia_application_id - django_algolia_write_api_key = 
var.django_algolia_write_api_key - django_allowed_hosts = var.django_allowed_hosts - django_aws_access_key_id = var.django_aws_access_key_id - django_aws_secret_access_key = var.django_aws_secret_access_key - django_configuration = var.django_configuration - django_db_host = var.django_db_host - django_db_name = var.django_db_name - django_db_user = var.django_db_user - django_db_port = var.django_db_port - django_db_password = var.django_db_password - django_open_ai_secret_key = var.django_open_ai_secret_key - django_redis_host = var.django_redis_host - django_redis_password = var.django_redis_password - django_secret_key = var.django_secret_key - django_sentry_dsn = var.django_sentry_dsn - django_slack_bot_token = var.django_slack_bot_token - django_slack_signing_secret = var.django_slack_signing_secret + aws_region = var.aws_region + common_tags = local.common_tags + django_environment_variables = local.django_environment_variables + environment = var.environment + lambda_sg_id = module.security.lambda_sg_id + private_subnet_ids = module.networking.private_subnet_ids + project_name = var.project_name } diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index d72a2f9235..8c59bb649b 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -9,27 +9,19 @@ terraform { } } -locals { - common_tags = { - Project = var.project_name - Environment = var.environment - ManagedBy = "Terraform" - } -} - resource "aws_ecs_cluster" "main" { name = "${var.project_name}-${var.environment}-cluster" - tags = local.common_tags + tags = var.common_tags } resource "aws_ecr_repository" "main" { name = "${var.project_name}-${var.environment}-backend" - tags = local.common_tags + tags = var.common_tags } resource "aws_iam_role" "ecs_tasks_execution_role" { name = "${var.project_name}-${var.environment}-ecs-tasks-execution-role" - tags = local.common_tags + tags = var.common_tags assume_role_policy = jsonencode({ Version 
= "2012-10-17" @@ -52,7 +44,7 @@ resource "aws_iam_role_policy_attachment" "ecs_tasks_execution_role_policy" { resource "aws_iam_role" "event_bridge_role" { name = "${var.project_name}-${var.environment}-event-bridge-role" - tags = local.common_tags + tags = var.common_tags assume_role_policy = jsonencode({ Version = "2012-10-17" @@ -73,501 +65,130 @@ resource "aws_iam_role_policy_attachment" "event_bridge_role_policy" { policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonEC2ContainerServiceEventsRole" } -# Task defitions -resource "aws_ecs_task_definition" "sync_data" { - family = "${var.project_name}-${var.environment}-sync-data" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.sync_data_task_cpu - memory = var.sync_data_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = aws_ecr_repository.main.repository_url - command = ["python", "manage.py", "sync-data"] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { - "awslogs-group" = aws_cloudwatch_log_group.sync_data.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - } - ]) -} - -resource "aws_cloudwatch_log_group" "sync_data" { - name = "/ecs/${var.project_name}-${var.environment}-sync-data" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_rule" "sync_data" { - name = "${var.project_name}-${var.environment}-sync-data-rule" - description = "Fires daily to trigger the sync-data task" - schedule_expression = "cron(17 05 * * ? 
*)" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_target" "sync_data" { - rule = aws_cloudwatch_event_rule.sync_data.name - target_id = "${var.project_name}-${var.environment}-sync-data-target" - arn = aws_ecs_cluster.main.arn - - ecs_target { - task_definition_arn = aws_ecs_task_definition.sync_data.arn - launch_type = "FARGATE" - network_configuration { - subnets = var.private_subnet_ids - security_groups = [var.lambda_sg_id] - assign_public_ip = false - } - } - - role_arn = aws_iam_role.event_bridge_role.arn -} - -resource "aws_ecs_task_definition" "owasp_update_project_health_metrics" { - family = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.update_project_health_metrics_task_cpu - memory = var.update_project_health_metrics_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = aws_ecr_repository.main.repository_url - command = ["/bin/sh", "-c", "python manage.py owasp-update-project-health-requirements && python manage.py owasp-update-project-health-metrics"] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { - "awslogs-group" = aws_cloudwatch_log_group.owasp_update_project_health_metrics.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - } - ]) -} - -resource "aws_cloudwatch_log_group" "owasp_update_project_health_metrics" { - name = "/ecs/${var.project_name}-${var.environment}-owasp-update-project-health-metrics" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_rule" "owasp_update_project_health_metrics" { - name = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics-rule" - description = "Fires daily to trigger the owasp-update-project-health-metrics task" - schedule_expression = "cron(17 17 * * ? 
*)" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_target" "owasp_update_project_health_metrics" { - rule = aws_cloudwatch_event_rule.owasp_update_project_health_metrics.name - target_id = "${var.project_name}-${var.environment}-owasp-update-project-health-metrics-target" - arn = aws_ecs_cluster.main.arn - - ecs_target { - task_definition_arn = aws_ecs_task_definition.owasp_update_project_health_metrics.arn - launch_type = "FARGATE" - network_configuration { - subnets = var.private_subnet_ids - security_groups = [var.lambda_sg_id] - assign_public_ip = false - } - } - - role_arn = aws_iam_role.event_bridge_role.arn -} - -resource "aws_ecs_task_definition" "owasp_update_project_health_scores" { - family = "${var.project_name}-${var.environment}-owasp-update-project-health-scores" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.update_project_health_scores_task_cpu - memory = var.update_project_health_scores_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = aws_ecr_repository.main.repository_url - command = ["python", "manage.py", "owasp-update-project-health-scores"] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { - "awslogs-group" = aws_cloudwatch_log_group.owasp_update_project_health_scores.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - } - ]) -} - -resource "aws_cloudwatch_log_group" "owasp_update_project_health_scores" { - name = "/ecs/${var.project_name}-${var.environment}-owasp-update-project-health-scores" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_rule" "owasp_update_project_health_scores" { - name = "${var.project_name}-${var.environment}-owasp-update-project-health-scores-rule" - description = "Fires daily to trigger the owasp-update-project-health-scores task" - schedule_expression = "cron(22 
17 * * ? *)" - tags = local.common_tags -} - -resource "aws_cloudwatch_event_target" "owasp_update_project_health_scores" { - rule = aws_cloudwatch_event_rule.owasp_update_project_health_scores.name - target_id = "${var.project_name}-${var.environment}-owasp-update-project-health-scores-target" - arn = aws_ecs_cluster.main.arn - - ecs_target { - task_definition_arn = aws_ecs_task_definition.owasp_update_project_health_scores.arn - launch_type = "FARGATE" - network_configuration { - subnets = var.private_subnet_ids - security_groups = [var.lambda_sg_id] - assign_public_ip = false - } - } - - role_arn = aws_iam_role.event_bridge_role.arn -} - -# One time tasks -resource "aws_ecs_task_definition" "migrate" { - family = "${var.project_name}-${var.environment}-migrate" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.migrate_task_cpu - memory = var.migrate_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = "${aws_ecr_repository.main.repository_url}:latest" - command = ["python", "manage.py", "migrate"] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { - "awslogs-group" = aws_cloudwatch_log_group.load_data.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - environment = [ - { - name = "DJANGO_ALGOLIA_APPLICATION_ID" - value = var.django_algolia_application_id - }, - { - name = "DJANGO_ALGOLIA_WRITE_API_KEY" - value = var.django_algolia_write_api_key - }, - { - name = "DJANGO_ALLOWED_HOSTS" - value = var.django_allowed_hosts - }, - { - name = "DJANGO_AWS_ACCESS_KEY_ID" - value = var.django_aws_access_key_id - }, - { - name = "DJANGO_AWS_SECRET_ACCESS_KEY" - value = var.django_aws_secret_access_key - }, - { - name = "DJANGO_CONFIGURATION" - value = var.django_configuration - }, - { - name = "DJANGO_DB_HOST" - value = var.django_db_host - }, - { - name = 
"DJANGO_DB_NAME" - value = var.django_db_name - }, - { - name = "DJANGO_DB_USER" - value = var.django_db_user - }, - { - name = "DJANGO_DB_PORT" - value = var.django_db_port - }, - { - name = "DJANGO_DB_PASSWORD" - value = var.django_db_password - }, - { - name = "DJANGO_OPEN_AI_SECRET_KEY" - value = var.django_open_ai_secret_key - }, - { - name = "DJANGO_REDIS_HOST" - value = var.django_redis_host - }, - { - name = "DJANGO_REDIS_PASSWORD" - value = var.django_redis_password - }, - { - name = "DJANGO_SECRET_KEY" - value = var.django_secret_key - }, - { - name = "DJANGO_SENTRY_DSN" - value = var.django_sentry_dsn - }, - { - name = "DJANGO_SLACK_BOT_TOKEN" - value = var.django_slack_bot_token - }, - { - name = "DJANGO_SLACK_SIGNING_SECRET" - value = var.django_slack_signing_secret - } - ] - } - ]) -} - -resource "aws_cloudwatch_log_group" "migrate" { - name = "/ecs/${var.project_name}-${var.environment}-migrate" - tags = local.common_tags -} - -resource "aws_ecs_task_definition" "load_data" { - family = "${var.project_name}-${var.environment}-load-data" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.load_data_task_cpu - memory = var.load_data_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = "${aws_ecr_repository.main.repository_url}:latest" - command = ["python", "manage.py", "load_data"] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { - "awslogs-group" = aws_cloudwatch_log_group.load_data.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - environment = [ - { - name = "DJANGO_ALGOLIA_APPLICATION_ID" - value = var.django_algolia_application_id - }, - { - name = "DJANGO_ALGOLIA_WRITE_API_KEY" - value = var.django_algolia_write_api_key - }, - { - name = "DJANGO_ALLOWED_HOSTS" - value = var.django_allowed_hosts - }, - { - name = 
"DJANGO_AWS_ACCESS_KEY_ID" - value = var.django_aws_access_key_id - }, - { - name = "DJANGO_AWS_SECRET_ACCESS_KEY" - value = var.django_aws_secret_access_key - }, - { - name = "DJANGO_CONFIGURATION" - value = var.django_configuration - }, - { - name = "DJANGO_DB_HOST" - value = var.django_db_host - }, - { - name = "DJANGO_DB_NAME" - value = var.django_db_name - }, - { - name = "DJANGO_DB_USER" - value = var.django_db_user - }, - { - name = "DJANGO_DB_PORT" - value = var.django_db_port - }, - { - name = "DJANGO_DB_PASSWORD" - value = var.django_db_password - }, - { - name = "DJANGO_OPEN_AI_SECRET_KEY" - value = var.django_open_ai_secret_key - }, - { - name = "DJANGO_REDIS_HOST" - value = var.django_redis_host - }, - { - name = "DJANGO_REDIS_PASSWORD" - value = var.django_redis_password - }, - { - name = "DJANGO_SECRET_KEY" - value = var.django_secret_key - }, - { - name = "DJANGO_SENTRY_DSN" - value = var.django_sentry_dsn - }, - { - name = "DJANGO_SLACK_BOT_TOKEN" - value = var.django_slack_bot_token - }, - { - name = "DJANGO_SLACK_SIGNING_SECRET" - value = var.django_slack_signing_secret - } - ] - } - ]) -} - -resource "aws_cloudwatch_log_group" "load_data" { - name = "/ecs/${var.project_name}-${var.environment}-load-data" - tags = local.common_tags -} - -resource "aws_ecs_task_definition" "index_data" { - family = "${var.project_name}-${var.environment}-index-data" - network_mode = "awsvpc" - requires_compatibilities = ["FARGATE"] - cpu = var.index_data_task_cpu - memory = var.index_data_task_memory - execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn - tags = local.common_tags - - container_definitions = jsonencode([ - { - name = "backend" - image = "${aws_ecr_repository.main.repository_url}:latest" - command = [ - "/bin/sh", - "-c", - <<-EOT - python manage.py algolia_reindex - python manage.py algolia_update_replicas - python manage.py algolia_update_synonyms - EOT - ] - essential = true - logConfiguration = { - logDriver = "awslogs" - options = { 
- "awslogs-group" = aws_cloudwatch_log_group.load_data.name - "awslogs-region" = var.aws_region - "awslogs-stream-prefix" = "ecs" - } - } - environment = [ - { - name = "DJANGO_ALGOLIA_APPLICATION_ID" - value = var.django_algolia_application_id - }, - { - name = "DJANGO_ALGOLIA_WRITE_API_KEY" - value = var.django_algolia_write_api_key - }, - { - name = "DJANGO_ALLOWED_HOSTS" - value = var.django_allowed_hosts - }, - { - name = "DJANGO_AWS_ACCESS_KEY_ID" - value = var.django_aws_access_key_id - }, - { - name = "DJANGO_AWS_SECRET_ACCESS_KEY" - value = var.django_aws_secret_access_key - }, - { - name = "DJANGO_CONFIGURATION" - value = var.django_configuration - }, - { - name = "DJANGO_DB_HOST" - value = var.django_db_host - }, - { - name = "DJANGO_DB_NAME" - value = var.django_db_name - }, - { - name = "DJANGO_DB_USER" - value = var.django_db_user - }, - { - name = "DJANGO_DB_PORT" - value = var.django_db_port - }, - { - name = "DJANGO_DB_PASSWORD" - value = var.django_db_password - }, - { - name = "DJANGO_OPEN_AI_SECRET_KEY" - value = var.django_open_ai_secret_key - }, - { - name = "DJANGO_REDIS_HOST" - value = var.django_redis_host - }, - { - name = "DJANGO_REDIS_PASSWORD" - value = var.django_redis_password - }, - { - name = "DJANGO_SECRET_KEY" - value = var.django_secret_key - }, - { - name = "DJANGO_SENTRY_DSN" - value = var.django_sentry_dsn - }, - { - name = "DJANGO_SLACK_BOT_TOKEN" - value = var.django_slack_bot_token - }, - { - name = "DJANGO_SLACK_SIGNING_SECRET" - value = var.django_slack_signing_secret - } - ] - } - ]) -} - -resource "aws_cloudwatch_log_group" "index_data" { - name = "/ecs/${var.project_name}-${var.environment}-index-data" - tags = local.common_tags +module "sync_data_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = ["python", "manage.py", "sync-data"] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.sync_data_task_cpu + ecs_cluster_arn = 
aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + event_bridge_role_arn = aws_iam_role.event_bridge_role.arn + image_url = aws_ecr_repository.main.repository_url + memory = var.sync_data_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + schedule_expression = "cron(17 05 * * ? *)" + security_group_ids = [var.lambda_sg_id] + task_name = "sync-data" +} + +module "owasp_update_project_health_metrics_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = ["/bin/sh", "-c", "python manage.py owasp-update-project-health-requirements && python manage.py owasp-update-project-health-metrics"] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.update_project_health_metrics_task_cpu + ecs_cluster_arn = aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + event_bridge_role_arn = aws_iam_role.event_bridge_role.arn + image_url = aws_ecr_repository.main.repository_url + memory = var.update_project_health_metrics_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + schedule_expression = "cron(17 17 * * ? 
*)" + security_group_ids = [var.lambda_sg_id] + task_name = "owasp-update-project-health-metrics" +} + +module "owasp_update_project_health_scores_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = ["python", "manage.py", "owasp-update-project-health-scores"] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.update_project_health_scores_task_cpu + ecs_cluster_arn = aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + event_bridge_role_arn = aws_iam_role.event_bridge_role.arn + image_url = aws_ecr_repository.main.repository_url + memory = var.update_project_health_scores_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + schedule_expression = "cron(22 17 * * ? *)" + security_group_ids = [var.lambda_sg_id] + task_name = "owasp-update-project-health-scores" +} + +module "migrate_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = ["python", "manage.py", "migrate"] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.migrate_task_cpu + ecs_cluster_arn = aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + image_url = "${aws_ecr_repository.main.repository_url}:latest" + memory = var.migrate_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + security_group_ids = [var.lambda_sg_id] + task_name = "migrate" +} + +module "load_data_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = ["python", "manage.py", "load_data"] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.load_data_task_cpu + ecs_cluster_arn = aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = 
aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + image_url = "${aws_ecr_repository.main.repository_url}:latest" + memory = var.load_data_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + security_group_ids = [var.lambda_sg_id] + task_name = "load-data" +} + +module "index_data_task" { + source = "./modules/task" + + aws_region = var.aws_region + command = [ + "/bin/sh", + "-c", + <<-EOT + python manage.py algolia_reindex + python manage.py algolia_update_replicas + python manage.py algolia_update_synonyms + EOT + ] + common_tags = var.common_tags + container_environment = var.django_environment_variables + cpu = var.index_data_task_cpu + ecs_cluster_arn = aws_ecs_cluster.main.arn + ecs_tasks_execution_role_arn = aws_iam_role.ecs_tasks_execution_role.arn + environment = var.environment + image_url = "${aws_ecr_repository.main.repository_url}:latest" + memory = var.index_data_task_memory + private_subnet_ids = var.private_subnet_ids + project_name = var.project_name + security_group_ids = [var.lambda_sg_id] + task_name = "index-data" } diff --git a/infrastructure/modules/ecs/modules/task/main.tf b/infrastructure/modules/ecs/modules/task/main.tf new file mode 100644 index 0000000000..7120aca6cd --- /dev/null +++ b/infrastructure/modules/ecs/modules/task/main.tf @@ -0,0 +1,80 @@ +terraform { + required_version = ">= 1.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 6.0" + } + } +} + +resource "aws_cloudwatch_log_group" "task" { + name = "/ecs/${var.project_name}-${var.environment}-${var.task_name}" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-${var.task_name}-logs" + }) +} + +resource "aws_ecs_task_definition" "task" { + family = "${var.project_name}-${var.environment}-${var.task_name}" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = var.cpu + memory = var.memory + execution_role_arn = 
var.ecs_tasks_execution_role_arn + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-${var.task_name}-task-def" + }) + + container_definitions = jsonencode([ + { + name = "backend" + image = var.image_url + command = var.command + essential = true + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = aws_cloudwatch_log_group.task.name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "ecs" + } + } + environment = [for name, value in var.container_environment : { + name = name + value = value + }] + } + ]) +} + +resource "aws_cloudwatch_event_rule" "task" { + count = var.schedule_expression != null ? 1 : 0 + + name = "${var.project_name}-${var.environment}-${var.task_name}-rule" + description = "Fires on a schedule to trigger the ${var.task_name} task" + schedule_expression = var.schedule_expression + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-${var.task_name}-rule" + }) +} + +resource "aws_cloudwatch_event_target" "task" { + count = var.schedule_expression != null ? 1 : 0 + + rule = aws_cloudwatch_event_rule.task[0].name + target_id = "${var.project_name}-${var.environment}-${var.task_name}-target" + arn = var.ecs_cluster_arn + role_arn = var.event_bridge_role_arn + + ecs_target { + task_definition_arn = aws_ecs_task_definition.task.arn + launch_type = "FARGATE" + network_configuration { + subnets = var.private_subnet_ids + security_groups = var.security_group_ids + assign_public_ip = false + } + } +} diff --git a/infrastructure/modules/ecs/modules/task/variables.tf b/infrastructure/modules/ecs/modules/task/variables.tf new file mode 100644 index 0000000000..bd8af473fc --- /dev/null +++ b/infrastructure/modules/ecs/modules/task/variables.tf @@ -0,0 +1,83 @@ +variable "aws_region" { + description = "The AWS region for the CloudWatch logs." + type = string +} + +variable "command" { + description = "The command to run in the container." 
+ type = list(string) +} + +variable "common_tags" { + description = "A map of common tags to apply to all resources." + type = map(string) + default = {} +} + +variable "container_environment" { + description = "A map of environment variables to pass to the container." + type = map(string) + default = {} +} + +variable "cpu" { + description = "The CPU units to allocate for the task." + type = string +} + +variable "ecs_cluster_arn" { + description = "The ARN of the ECS cluster." + type = string +} + +variable "ecs_tasks_execution_role_arn" { + description = "The ARN of the ECS task execution role." + type = string +} + +variable "environment" { + description = "The environment (e.g., staging, production)." + type = string +} + +variable "event_bridge_role_arn" { + description = "The ARN of the EventBridge role to trigger the task. Only required for scheduled tasks." + type = string + default = null +} + +variable "image_url" { + description = "The URL of the ECR image to run." + type = string +} + +variable "memory" { + description = "The memory (in MiB) to allocate for the task." + type = string +} + +variable "private_subnet_ids" { + description = "A list of private subnet IDs for the task." + type = list(string) +} + +variable "project_name" { + description = "The name of the project." + type = string +} + +variable "schedule_expression" { + description = "The cron expression for the schedule. If null, the task is not scheduled." + type = string + default = null +} + +variable "security_group_ids" { + description = "A list of security group IDs to associate with the task." + type = list(string) +} + +variable "task_name" { + description = "The unique name of the task." 
+ type = string +} diff --git a/infrastructure/modules/ecs/variables.tf b/infrastructure/modules/ecs/variables.tf index 53eb5b4bf4..c8dffa7925 100644 --- a/infrastructure/modules/ecs/variables.tf +++ b/infrastructure/modules/ecs/variables.tf @@ -1,75 +1,41 @@ -variable "project_name" { - description = "The name of the project" - type = string -} - -variable "environment" { - description = "The environment (e.g., staging, production)" - type = string -} - variable "aws_region" { description = "The AWS region" type = string } -variable "private_subnet_ids" { - description = "A list of private subnet IDs" - type = list(string) +variable "common_tags" { + description = "A map of common tags to apply to all resources." + type = map(string) + default = {} } -variable "lambda_sg_id" { - description = "The ID of the security group for the Lambda function" - type = string -} - -variable "sync_data_task_cpu" { - description = "The CPU for the sync-data task" - type = string - default = "256" -} - -variable "sync_data_task_memory" { - description = "The memory for the sync-data task" - type = string - default = "512" -} - -variable "update_project_health_metrics_task_cpu" { - description = "The CPU for the update-project-health-metrics task" - type = string - default = "256" +variable "django_environment_variables" { + description = "A map of environment variables for the Django container." 
+ type = map(string) + default = {} + sensitive = true } -variable "update_project_health_metrics_task_memory" { - description = "The memory for the update-project-health-metrics task" +variable "environment" { + description = "The environment (e.g., staging, production)" type = string - default = "512" } -variable "update_project_health_scores_task_cpu" { - description = "The CPU for the update-project-health-scores task" +variable "index_data_task_cpu" { + description = "The CPU for the index-data task" type = string default = "256" } -variable "update_project_health_scores_task_memory" { - description = "The memory for the update-project-health-scores task" - type = string - default = "512" -} - -# One time tasks -variable "migrate_task_cpu" { - description = "The CPU for the load-data task" +variable "index_data_task_memory" { + description = "The memory for the index-data task" type = string - default = "256" + default = "2048" } -variable "migrate_task_memory" { - description = "The memory for the load-data task" +variable "lambda_sg_id" { + description = "The ID of the security group for the Lambda function" type = string - default = "2048" } variable "load_data_task_cpu" { @@ -84,132 +50,60 @@ variable "load_data_task_memory" { default = "2048" } -variable "index_data_task_cpu" { - description = "The CPU for the index-data task" +variable "migrate_task_cpu" { + description = "The CPU for the load-data task" type = string default = "256" } -variable "index_data_task_memory" { - description = "The memory for the index-data task" +variable "migrate_task_memory" { + description = "The memory for the load-data task" type = string default = "2048" } -# Environment Variables (temporary) -variable "django_algolia_application_id" { - type = string - description = "Algolia application ID." - default = null -} - -variable "django_allowed_hosts" { - type = string - description = "Comma-separated list of allowed hosts for Django." 
- default = null -} - -variable "django_db_host" { - type = string - description = "Database host URL." - default = null -} - -variable "django_db_name" { - type = string - description = "Database name." - default = null -} - -variable "django_db_user" { - type = string - description = "Database user." - default = null -} - -variable "django_db_port" { - type = string - description = "Database port." - default = null -} - -variable "django_redis_host" { - type = string - description = "Redis host URL." - default = null -} - -variable "django_algolia_write_api_key" { - type = string - description = "Algolia write API key." - sensitive = true - default = null -} - -variable "django_aws_access_key_id" { - type = string - description = "AWS access key for Django." - sensitive = true - default = null -} - -variable "django_aws_secret_access_key" { - type = string - description = "AWS secret access key for Django." - sensitive = true - default = null -} -variable "django_configuration" { - type = string - description = "Django Configuration" - default = null +variable "private_subnet_ids" { + description = "A list of private subnet IDs" + type = list(string) } -variable "django_db_password" { +variable "project_name" { + description = "The name of the project" type = string - description = "Database password." - sensitive = true - default = null } -variable "django_open_ai_secret_key" { +variable "sync_data_task_cpu" { + description = "The CPU for the sync-data task" type = string - description = "OpenAI secret key." - sensitive = true - default = null + default = "256" } -variable "django_redis_password" { +variable "sync_data_task_memory" { + description = "The memory for the sync-data task" type = string - description = "Redis password." 
- sensitive = true - default = null + default = "512" } -variable "django_secret_key" { +variable "update_project_health_metrics_task_cpu" { + description = "The CPU for the update-project-health-metrics task" type = string - description = "Django secret key." - sensitive = true - default = null + default = "256" } -variable "django_sentry_dsn" { +variable "update_project_health_metrics_task_memory" { + description = "The memory for the update-project-health-metrics task" type = string - description = "Sentry DSN for error tracking." - sensitive = true - default = null + default = "512" } -variable "django_slack_bot_token" { +variable "update_project_health_scores_task_cpu" { + description = "The CPU for the update-project-health-scores task" type = string - description = "Slack bot token." - sensitive = true - default = null + default = "256" } -variable "django_slack_signing_secret" { +variable "update_project_health_scores_task_memory" { + description = "The memory for the update-project-health-scores task" type = string - description = "Slack signing secret." 
- sensitive = true - default = null + default = "512" } From 619400eb55c94c4d7498afa772b83c62906c0ad1 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 18:37:19 +0530 Subject: [PATCH 11/35] refactor/clean networking module --- infrastructure/main.tf | 9 +- infrastructure/modules/networking/main.tf | 90 +++++++------------ .../modules/networking/variables.tf | 6 ++ 3 files changed, 43 insertions(+), 62 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index 6d5d131a1a..596106d6a5 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -44,12 +44,13 @@ locals { module "networking" { source = "./modules/networking" - vpc_cidr = var.vpc_cidr - public_subnet_cidrs = var.public_subnet_cidrs - private_subnet_cidrs = var.private_subnet_cidrs availability_zones = var.availability_zones - project_name = var.project_name + common_tags = local.common_tags environment = var.environment + private_subnet_cidrs = var.private_subnet_cidrs + project_name = var.project_name + public_subnet_cidrs = var.public_subnet_cidrs + vpc_cidr = var.vpc_cidr } module "security" { diff --git a/infrastructure/modules/networking/main.tf b/infrastructure/modules/networking/main.tf index 7a105d214c..691ef83184 100644 --- a/infrastructure/modules/networking/main.tf +++ b/infrastructure/modules/networking/main.tf @@ -13,118 +13,92 @@ terraform { } } -# VPC resource "aws_vpc" "main" { cidr_block = var.vpc_cidr enable_dns_hostnames = true enable_dns_support = true - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-vpc" - } + }) } -# Internet Gateway resource "aws_internet_gateway" "main" { - vpc_id = aws_vpc.main.id - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-igw" - } + }) + vpc_id = aws_vpc.main.id } -# Public Subnets resource "aws_subnet" "public" { - count = length(var.public_subnet_cidrs) - - vpc_id = aws_vpc.main.id - cidr_block = 
var.public_subnet_cidrs[count.index] availability_zone = var.availability_zones[count.index] + cidr_block = var.public_subnet_cidrs[count.index] + count = length(var.public_subnet_cidrs) map_public_ip_on_launch = true - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-public-${var.availability_zones[count.index]}" Type = "Public" - } + }) + vpc_id = aws_vpc.main.id } -# Private Subnets resource "aws_subnet" "private" { - count = length(var.private_subnet_cidrs) - - vpc_id = aws_vpc.main.id - cidr_block = var.private_subnet_cidrs[count.index] availability_zone = var.availability_zones[count.index] - - tags = { + cidr_block = var.private_subnet_cidrs[count.index] + count = length(var.private_subnet_cidrs) + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-private-${var.availability_zones[count.index]}" Type = "Private" - } + }) + vpc_id = aws_vpc.main.id } -# Elastic IP for NAT Gateway resource "aws_eip" "nat" { - domain = "vpc" - - tags = { - Name = "${var.project_name}-${var.environment}-nat-eip" - } - depends_on = [aws_internet_gateway.main] + domain = "vpc" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-nat-eip" + }) } -# NAT Gateway (in first public subnet) resource "aws_nat_gateway" "main" { allocation_id = aws_eip.nat.id + depends_on = [aws_internet_gateway.main] subnet_id = aws_subnet.public[0].id - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-nat" - } - - depends_on = [aws_internet_gateway.main] + }) } -# Public Route Table resource "aws_route_table" "public" { - vpc_id = aws_vpc.main.id - route { cidr_block = "0.0.0.0/0" gateway_id = aws_internet_gateway.main.id } - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-public-rt" - } + }) + vpc_id = aws_vpc.main.id } -# Private Route Table resource "aws_route_table" "private" { - vpc_id = aws_vpc.main.id - route { 
cidr_block = "0.0.0.0/0" nat_gateway_id = aws_nat_gateway.main.id } - - tags = { + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-private-rt" - } + }) + vpc_id = aws_vpc.main.id } -# Associate public subnets with public route table resource "aws_route_table_association" "public" { - count = length(aws_subnet.public) - - subnet_id = aws_subnet.public[count.index].id + count = length(aws_subnet.public) route_table_id = aws_route_table.public.id + subnet_id = aws_subnet.public[count.index].id } -# Associate private subnets with private route table resource "aws_route_table_association" "private" { - count = length(aws_subnet.private) - - subnet_id = aws_subnet.private[count.index].id + count = length(aws_subnet.private) route_table_id = aws_route_table.private.id + subnet_id = aws_subnet.private[count.index].id } diff --git a/infrastructure/modules/networking/variables.tf b/infrastructure/modules/networking/variables.tf index 3af07da20e..74c108743d 100644 --- a/infrastructure/modules/networking/variables.tf +++ b/infrastructure/modules/networking/variables.tf @@ -3,6 +3,12 @@ variable "availability_zones" { type = list(string) } +variable "common_tags" { + description = "A map of common tags to apply to all resources." 
+ type = map(string) + default = {} +} + variable "environment" { description = "The environment (e.g., staging, production)" type = string From b8b49fa1555bed4bdf25dc5cba2c3a2ce1f4bb96 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 19:03:06 +0530 Subject: [PATCH 12/35] address Sonar Qube bot suggestions --- .../modules/ecs/modules/task/main.tf | 3 +- .../modules/ecs/modules/task/variables.tf | 6 ++ infrastructure/modules/storage/main.tf | 58 +++++++++++++++++++ 3 files changed, 66 insertions(+), 1 deletion(-) diff --git a/infrastructure/modules/ecs/modules/task/main.tf b/infrastructure/modules/ecs/modules/task/main.tf index 7120aca6cd..a85ce0889a 100644 --- a/infrastructure/modules/ecs/modules/task/main.tf +++ b/infrastructure/modules/ecs/modules/task/main.tf @@ -10,7 +10,8 @@ terraform { } resource "aws_cloudwatch_log_group" "task" { - name = "/ecs/${var.project_name}-${var.environment}-${var.task_name}" + name = "/ecs/${var.project_name}-${var.environment}-${var.task_name}" + retention_in_days = var.log_retention_in_days tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-${var.task_name}-logs" }) diff --git a/infrastructure/modules/ecs/modules/task/variables.tf b/infrastructure/modules/ecs/modules/task/variables.tf index bd8af473fc..7c0a86c23e 100644 --- a/infrastructure/modules/ecs/modules/task/variables.tf +++ b/infrastructure/modules/ecs/modules/task/variables.tf @@ -51,6 +51,12 @@ variable "image_url" { type = string } +variable "log_retention_in_days" { + description = "The number of days to retain log events." + type = number + default = 30 +} + variable "memory" { description = "The memory (in MiB) to allocate for the task." 
type = string diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 15793e23bd..3e39da5a3f 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -23,6 +23,37 @@ resource "aws_s3_bucket" "zappa" { force_destroy = true } +# S3 Bucket for Zappa Logs +resource "aws_s3_bucket" "zappa_logs" { + bucket = "${var.zappa_s3_bucket}-logs" + + tags = { + Name = "${var.project_name}-${var.environment}-zappa-deployments-logs" + } +} + +resource "aws_s3_bucket_ownership_controls" "zappa_logs" { + bucket = aws_s3_bucket.zappa_logs.id + rule { + object_ownership = "BucketOwnerPreferred" + } +} + +# Grant log-delivery-write ACL to the new bucket +resource "aws_s3_bucket_acl" "zappa_logs" { + depends_on = [aws_s3_bucket_ownership_controls.zappa_logs] + bucket = aws_s3_bucket.zappa_logs.id + acl = "log-delivery-write" +} + +# Enable logging for the zappa bucket +resource "aws_s3_bucket_logging" "zappa" { + bucket = aws_s3_bucket.zappa.id + + target_bucket = aws_s3_bucket.zappa_logs.id + target_prefix = "log/" +} + # Block public access resource "aws_s3_bucket_public_access_block" "zappa" { bucket = aws_s3_bucket.zappa.id @@ -70,3 +101,30 @@ resource "aws_s3_bucket_lifecycle_configuration" "zappa" { } } } + +# Enforce HTTPS-only access +data "aws_iam_policy_document" "zappa" { + statement { + sid = "EnforceTls" + effect = "Deny" + principals { + type = "*" + identifiers = ["*"] + } + actions = ["s3:*"] + resources = [ + aws_s3_bucket.zappa.arn, + "${aws_s3_bucket.zappa.arn}/*", + ] + condition { + test = "Bool" + variable = "aws:SecureTransport" + values = ["false"] + } + } +} + +resource "aws_s3_bucket_policy" "zappa" { + bucket = aws_s3_bucket.zappa.id + policy = data.aws_iam_policy_document.zappa.json +} From 8c3569dc86354ad38953cbb633f24ce1ee1f8181 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 19:12:52 +0530 Subject: [PATCH 13/35] keep some Sonar Qube bot 
suggestions but add #NOSONAR --- infrastructure/modules/storage/main.tf | 33 +------------------------- 1 file changed, 1 insertion(+), 32 deletions(-) diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 3e39da5a3f..63de21b355 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -14,7 +14,7 @@ terraform { } # S3 Bucket for Zappa Deployments -resource "aws_s3_bucket" "zappa" { +resource "aws_s3_bucket" "zappa" { # NOSONAR bucket = var.zappa_s3_bucket tags = { @@ -23,37 +23,6 @@ resource "aws_s3_bucket" "zappa" { force_destroy = true } -# S3 Bucket for Zappa Logs -resource "aws_s3_bucket" "zappa_logs" { - bucket = "${var.zappa_s3_bucket}-logs" - - tags = { - Name = "${var.project_name}-${var.environment}-zappa-deployments-logs" - } -} - -resource "aws_s3_bucket_ownership_controls" "zappa_logs" { - bucket = aws_s3_bucket.zappa_logs.id - rule { - object_ownership = "BucketOwnerPreferred" - } -} - -# Grant log-delivery-write ACL to the new bucket -resource "aws_s3_bucket_acl" "zappa_logs" { - depends_on = [aws_s3_bucket_ownership_controls.zappa_logs] - bucket = aws_s3_bucket.zappa_logs.id - acl = "log-delivery-write" -} - -# Enable logging for the zappa bucket -resource "aws_s3_bucket_logging" "zappa" { - bucket = aws_s3_bucket.zappa.id - - target_bucket = aws_s3_bucket.zappa_logs.id - target_prefix = "log/" -} - # Block public access resource "aws_s3_bucket_public_access_block" "zappa" { bucket = aws_s3_bucket.zappa.id From 91b12e7dda9b74b5d51697cf1b2729e2ce4fb3c3 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 19:29:16 +0530 Subject: [PATCH 14/35] add terraform dictionary --- cspell/cspell.json | 2 ++ cspell/custom-dict.txt | 1 + cspell/package.json | 1 + cspell/pnpm-lock.yaml | 3 +++ 4 files changed, 7 insertions(+) diff --git a/cspell/cspell.json b/cspell/cspell.json index 86a2d86602..e5f8b26a20 100644 --- a/cspell/cspell.json +++ 
b/cspell/cspell.json @@ -34,6 +34,7 @@ "python", "rust", "software-terms", + "terraform", "win32" ], "enabled": true, @@ -58,6 +59,7 @@ "@cspell/dict-k8s/cspell-ext.json", "@cspell/dict-people-names/cspell-ext.json", "@cspell/dict-software-terms/cspell-ext.json", + "@cspell/dict-terraform/cspell-ext.json", "@cspell/dict-win32/cspell-ext.json" ], "useGitignore": true diff --git a/cspell/custom-dict.txt b/cspell/custom-dict.txt index 9a191d1b4a..0e22b0b997 100644 --- a/cspell/custom-dict.txt +++ b/cspell/custom-dict.txt @@ -66,6 +66,7 @@ gunicorn heroui hsl igoat +igw inlinehilite isanori jumpstart diff --git a/cspell/package.json b/cspell/package.json index 016082f774..f57328d279 100644 --- a/cspell/package.json +++ b/cspell/package.json @@ -8,6 +8,7 @@ "@cspell/dict-k8s": "^1.0.12", "@cspell/dict-people-names": "^1.1.14", "@cspell/dict-software-terms": "^4.2.5", + "@cspell/dict-terraform": "^1.1.3", "@cspell/dict-win32": "^2.0.9", "cspell": "^8.19.4" } diff --git a/cspell/pnpm-lock.yaml b/cspell/pnpm-lock.yaml index d72d010228..945f354ae8 100644 --- a/cspell/pnpm-lock.yaml +++ b/cspell/pnpm-lock.yaml @@ -32,6 +32,9 @@ importers: '@cspell/dict-software-terms': specifier: ^4.2.5 version: 4.2.5 + '@cspell/dict-terraform': + specifier: ^1.1.3 + version: 1.1.3 '@cspell/dict-win32': specifier: ^2.0.9 version: 2.0.9 From 92469ac70f1d08c0081180f93c31e38622bf4525 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 19:30:16 +0530 Subject: [PATCH 15/35] refactor/clean security module --- infrastructure/main.tf | 7 +- infrastructure/modules/security/main.tf | 98 +++++++++----------- infrastructure/modules/security/outputs.tf | 10 +- infrastructure/modules/security/variables.tf | 12 +++ 4 files changed, 66 insertions(+), 61 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index 596106d6a5..9e2ba5de3a 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -56,11 +56,12 @@ module "networking" { module "security" { 
source = "./modules/security" - vpc_id = module.networking.vpc_id + common_tags = local.common_tags db_port = var.db_port - redis_port = var.redis_port - project_name = var.project_name environment = var.environment + project_name = var.project_name + redis_port = var.redis_port + vpc_id = module.networking.vpc_id } module "storage" { diff --git a/infrastructure/modules/security/main.tf b/infrastructure/modules/security/main.tf index f8e224f588..a6b155f0b7 100644 --- a/infrastructure/modules/security/main.tf +++ b/infrastructure/modules/security/main.tf @@ -13,102 +13,94 @@ terraform { } } -# Lambda Security Group resource "aws_security_group" "lambda" { - name = "${var.project_name}-${var.environment}-lambda-sg" description = "Security group for Lambda functions (Zappa)" - vpc_id = var.vpc_id + name = "${var.project_name}-${var.environment}-lambda-sg" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-lambda-sg" + }) + vpc_id = var.vpc_id egress { + cidr_blocks = var.default_egress_cidr_blocks description = "Allow all outbound traffic" from_port = 0 - to_port = 0 protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - tags = { - Name = "${var.project_name}-${var.environment}-lambda-sg" + to_port = 0 } } -# RDS Proxy Security Group resource "aws_security_group" "rds_proxy" { - name = "${var.project_name}-${var.environment}-rds-proxy-sg" description = "Security group for RDS Proxy" - vpc_id = var.vpc_id + name = "${var.project_name}-${var.environment}-rds-proxy-sg" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-rds-proxy-sg" + }) + vpc_id = var.vpc_id + + egress { + cidr_blocks = var.default_egress_cidr_blocks + description = "Allow all outbound traffic" + from_port = 0 + protocol = "-1" + to_port = 0 + } ingress { description = "PostgreSQL from Lambda" from_port = var.db_port - to_port = var.db_port protocol = "tcp" security_groups = [aws_security_group.lambda.id] + to_port = var.db_port } +} + 
+resource "aws_security_group" "rds" { + description = "Security group for RDS PostgreSQL" + name = "${var.project_name}-${var.environment}-rds-sg" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-rds-sg" + }) + vpc_id = var.vpc_id egress { + cidr_blocks = var.default_egress_cidr_blocks description = "Allow all outbound traffic" from_port = 0 - to_port = 0 protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - tags = { - Name = "${var.project_name}-${var.environment}-rds-proxy-sg" + to_port = 0 } -} - -# RDS Security Group -resource "aws_security_group" "rds" { - name = "${var.project_name}-${var.environment}-rds-sg" - description = "Security group for RDS PostgreSQL" - vpc_id = var.vpc_id ingress { description = "PostgreSQL from RDS Proxy" from_port = var.db_port - to_port = var.db_port protocol = "tcp" security_groups = [aws_security_group.rds_proxy.id] + to_port = var.db_port } +} + +resource "aws_security_group" "redis" { + description = "Security group for ElastiCache Redis" + name = "${var.project_name}-${var.environment}-redis-sg" + tags = merge(var.common_tags, { + Name = "${var.project_name}-${var.environment}-redis-sg" + }) + vpc_id = var.vpc_id egress { + cidr_blocks = var.default_egress_cidr_blocks description = "Allow all outbound traffic" from_port = 0 - to_port = 0 protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - tags = { - Name = "${var.project_name}-${var.environment}-rds-sg" + to_port = 0 } -} - -# ElastiCache Security Group -resource "aws_security_group" "redis" { - name = "${var.project_name}-${var.environment}-redis-sg" - description = "Security group for ElastiCache Redis" - vpc_id = var.vpc_id ingress { description = "Redis from Lambda" from_port = var.redis_port - to_port = var.redis_port protocol = "tcp" security_groups = [aws_security_group.lambda.id] - } - - egress { - description = "Allow all outbound traffic" - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - tags 
= { - Name = "${var.project_name}-${var.environment}-redis-sg" + to_port = var.redis_port } } diff --git a/infrastructure/modules/security/outputs.tf b/infrastructure/modules/security/outputs.tf index 0c5d68ae4e..eea4186b25 100644 --- a/infrastructure/modules/security/outputs.tf +++ b/infrastructure/modules/security/outputs.tf @@ -3,6 +3,11 @@ output "lambda_sg_id" { value = aws_security_group.lambda.id } +output "rds_proxy_sg_id" { + description = "The ID of the RDS proxy security group" + value = aws_security_group.rds_proxy.id +} + output "rds_sg_id" { description = "The ID of the RDS security group" value = aws_security_group.rds.id @@ -12,8 +17,3 @@ output "redis_sg_id" { description = "The ID of the Redis security group" value = aws_security_group.redis.id } - -output "rds_proxy_sg_id" { - description = "The ID of the RDS proxy security group" - value = aws_security_group.rds_proxy.id -} diff --git a/infrastructure/modules/security/variables.tf b/infrastructure/modules/security/variables.tf index 7c86b46554..5c84a60699 100644 --- a/infrastructure/modules/security/variables.tf +++ b/infrastructure/modules/security/variables.tf @@ -1,8 +1,20 @@ +variable "common_tags" { + description = "A map of common tags to apply to all resources." + type = map(string) + default = {} +} + variable "db_port" { description = "The port for the RDS database" type = number } +variable "default_egress_cidr_blocks" { + description = "A list of CIDR blocks to allow for default egress traffic." 
+ type = list(string) + default = ["0.0.0.0/0"] +} + variable "environment" { description = "The environment (e.g., staging, production)" type = string From 1a66c4fc6c39cc8eae7a3bd85e439acb741a6617 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 19:55:00 +0530 Subject: [PATCH 16/35] fix pre-commit hooks and add terraform_validate --- .github/workflows/run-ci-cd.yaml | 5 +++ .pre-commit-config.yaml | 2 + .../modules/cache/.terraform.lock.hcl | 45 +++++++++++++++++++ .../modules/database/.terraform.lock.hcl | 45 +++++++++++++++++++ .../modules/ecs/.terraform.lock.hcl | 25 +++++++++++ .../ecs/modules/task/.terraform.lock.hcl | 25 +++++++++++ .../modules/networking/.terraform.lock.hcl | 45 +++++++++++++++++++ .../modules/security/.terraform.lock.hcl | 45 +++++++++++++++++++ .../modules/storage/.terraform.lock.hcl | 45 +++++++++++++++++++ 9 files changed, 282 insertions(+) create mode 100644 infrastructure/modules/cache/.terraform.lock.hcl create mode 100644 infrastructure/modules/database/.terraform.lock.hcl create mode 100644 infrastructure/modules/ecs/.terraform.lock.hcl create mode 100644 infrastructure/modules/ecs/modules/task/.terraform.lock.hcl create mode 100644 infrastructure/modules/networking/.terraform.lock.hcl create mode 100644 infrastructure/modules/security/.terraform.lock.hcl create mode 100644 infrastructure/modules/storage/.terraform.lock.hcl diff --git a/.github/workflows/run-ci-cd.yaml b/.github/workflows/run-ci-cd.yaml index adef9776f3..c62a867a2f 100644 --- a/.github/workflows/run-ci-cd.yaml +++ b/.github/workflows/run-ci-cd.yaml @@ -55,6 +55,11 @@ jobs: restore-keys: | pre-commit-${{ runner.os }}- + - name: Setup TFLint + uses: terraform-linters/setup-tflint@v4 + with: + tflint_version: v0.59.1 + - name: Run pre-commit uses: pre-commit/action@2c7b3805fd2a0fd8c1884dcaebf91fc102a13ecd diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de0fa8008e..fe139574ed 100644 --- a/.pre-commit-config.yaml +++ 
b/.pre-commit-config.yaml @@ -14,6 +14,8 @@ repos: hooks: - id: terraform_fmt files: ^infrastructure/.*\.tf$ + - id: terraform_validate + files: ^infrastructure/.*\.tf$ - id: terraform_tflint files: ^infrastructure/.*\.tf$ diff --git a/infrastructure/modules/cache/.terraform.lock.hcl b/infrastructure/modules/cache/.terraform.lock.hcl new file mode 100644 index 0000000000..e15fbddce1 --- /dev/null +++ b/infrastructure/modules/cache/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + constraints = "~> 3.0" + hashes = [ + 
"h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + "zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} diff --git a/infrastructure/modules/database/.terraform.lock.hcl b/infrastructure/modules/database/.terraform.lock.hcl new file mode 100644 index 0000000000..e15fbddce1 --- /dev/null +++ b/infrastructure/modules/database/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + constraints = "~> 3.0" + hashes = [ + "h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + 
"zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + "zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} diff --git a/infrastructure/modules/ecs/.terraform.lock.hcl b/infrastructure/modules/ecs/.terraform.lock.hcl new file mode 100644 index 0000000000..2efdd35b34 --- /dev/null +++ b/infrastructure/modules/ecs/.terraform.lock.hcl @@ -0,0 +1,25 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} diff --git 
a/infrastructure/modules/ecs/modules/task/.terraform.lock.hcl b/infrastructure/modules/ecs/modules/task/.terraform.lock.hcl new file mode 100644 index 0000000000..2efdd35b34 --- /dev/null +++ b/infrastructure/modules/ecs/modules/task/.terraform.lock.hcl @@ -0,0 +1,25 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} diff --git a/infrastructure/modules/networking/.terraform.lock.hcl b/infrastructure/modules/networking/.terraform.lock.hcl new file mode 100644 index 0000000000..e15fbddce1 --- /dev/null +++ b/infrastructure/modules/networking/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". 
+# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + constraints = "~> 3.0" + hashes = [ + "h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + 
"zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + "zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} diff --git a/infrastructure/modules/security/.terraform.lock.hcl b/infrastructure/modules/security/.terraform.lock.hcl new file mode 100644 index 0000000000..e15fbddce1 --- /dev/null +++ b/infrastructure/modules/security/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. + +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] 
+} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + constraints = "~> 3.0" + hashes = [ + "h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + "zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} diff --git a/infrastructure/modules/storage/.terraform.lock.hcl b/infrastructure/modules/storage/.terraform.lock.hcl new file mode 100644 index 0000000000..e15fbddce1 --- /dev/null +++ b/infrastructure/modules/storage/.terraform.lock.hcl @@ -0,0 +1,45 @@ +# This file is maintained automatically by "terraform init". +# Manual edits may be lost in future updates. 
+ +provider "registry.terraform.io/hashicorp/aws" { + version = "6.17.0" + constraints = "~> 6.0" + hashes = [ + "h1:65zxvr7oxROr5hqTWQtoS5HsGOBwUko7douoc9Azptc=", + "zh:157063d66cd4b5fc650f20f56127e19c9da5d135f4231f9ca0c19a1c0bf6e29d", + "zh:2050dc03304b42204e6c58bbb1a2afd4feeac7db55d7c06be77c6b1e2ab46a0f", + "zh:2a7f7751eef636ca064700cc4574b9b54a2596d9e2e86b91c45127410d9724c6", + "zh:335fd7bb44bebfc4dd1db1c013947e1dde2518c6f2d846aac13b7314414ce461", + "zh:545c248d2eb601a7b45a34313096cae0a5201ccf31e7fd99428357ef800051e0", + "zh:57d19883a6367c245e885856a1c5395c4c743c20feff631ea4ec7b5e16826281", + "zh:66d4f080b8c268d65e8c4758ed57234e5a19deff6073ffc3753b9a4cc177b54e", + "zh:6ad50de35970f15e1ed41d39742290c1be80600b7df3a9fbb4c02f353b9586cf", + "zh:7af42fa531e4dcb3ddb09f71ca988e90626abbf56a45981c2a6c01d0b364a51b", + "zh:9a6a535a879314a9137ec9d3e858b7c490a962050845cf62620ba2bf4ae916a8", + "zh:9b12af85486a96aedd8d7984b0ff811a4b42e3d88dad1a3fb4c0b580d04fa425", + "zh:ca213e0262c8f686fcd40e3fc84d67b8eea1596de988c13d4a8ecd4522ede669", + "zh:cc4132f682e9bf17c0649928ad92af4da07ffe7bccfe615d955225cdcf9e7f09", + "zh:dfe6de43496d2e2b6dff131fef6ada1e15f1fbba3d47235c751564d22003d05e", + "zh:e37d035fa02693a3d47fe636076cce50b6579b6adc0a36a7cf0456a2331c99ec", + ] +} + +provider "registry.terraform.io/hashicorp/random" { + version = "3.7.2" + constraints = "~> 3.0" + hashes = [ + "h1:356j/3XnXEKr9nyicLUufzoF4Yr6hRy481KIxRVpK0c=", + "zh:14829603a32e4bc4d05062f059e545a91e27ff033756b48afbae6b3c835f508f", + "zh:1527fb07d9fea400d70e9e6eb4a2b918d5060d604749b6f1c361518e7da546dc", + "zh:1e86bcd7ebec85ba336b423ba1db046aeaa3c0e5f921039b3f1a6fc2f978feab", + "zh:24536dec8bde66753f4b4030b8f3ef43c196d69cccbea1c382d01b222478c7a3", + "zh:29f1786486759fad9b0ce4fdfbbfece9343ad47cd50119045075e05afe49d212", + "zh:4d701e978c2dd8604ba1ce962b047607701e65c078cb22e97171513e9e57491f", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + 
"zh:7b8434212eef0f8c83f5a90c6d76feaf850f6502b61b53c329e85b3b281cba34", + "zh:ac8a23c212258b7976e1621275e3af7099e7e4a3d4478cf8d5d2a27f3bc3e967", + "zh:b516ca74431f3df4c6cf90ddcdb4042c626e026317a33c53f0b445a3d93b720d", + "zh:dc76e4326aec2490c1600d6871a95e78f9050f9ce427c71707ea412a2f2f1a62", + "zh:eac7b63e86c749c7d48f527671c7aee5b4e26c10be6ad7232d6860167f99dbb0", + ] +} From 6994e33d0850b80097c5fdf6e790d7f3e9cd1268 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 20:01:47 +0530 Subject: [PATCH 17/35] add SHA hash and remove terraform_validate --- .github/workflows/run-ci-cd.yaml | 2 +- .pre-commit-config.yaml | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/.github/workflows/run-ci-cd.yaml b/.github/workflows/run-ci-cd.yaml index c62a867a2f..e5caeec3fa 100644 --- a/.github/workflows/run-ci-cd.yaml +++ b/.github/workflows/run-ci-cd.yaml @@ -56,7 +56,7 @@ jobs: pre-commit-${{ runner.os }}- - name: Setup TFLint - uses: terraform-linters/setup-tflint@v4 + uses: terraform-linters/setup-tflint@dfcb455ed8d56b55edf6447a37379108181a6707 with: tflint_version: v0.59.1 diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index fe139574ed..de0fa8008e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -14,8 +14,6 @@ repos: hooks: - id: terraform_fmt files: ^infrastructure/.*\.tf$ - - id: terraform_validate - files: ^infrastructure/.*\.tf$ - id: terraform_tflint files: ^infrastructure/.*\.tf$ From a41221f2a9754f5372a1a2a6ba9db4a0b0d8117a Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 20:19:41 +0530 Subject: [PATCH 18/35] refactor/clean storage module --- infrastructure/main.tf | 8 ++-- infrastructure/modules/storage/main.tf | 46 ++++++++------------- infrastructure/modules/storage/variables.tf | 24 +++++++++++ infrastructure/variables.tf | 6 ++- 4 files changed, 52 insertions(+), 32 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index 
9e2ba5de3a..91bb3d6dd2 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -67,9 +67,11 @@ module "security" { module "storage" { source = "./modules/storage" - zappa_s3_bucket = var.zappa_s3_bucket - project_name = var.project_name - environment = var.environment + common_tags = local.common_tags + environment = var.environment + force_destroy_bucket = var.force_destroy_bucket + project_name = var.project_name + zappa_s3_bucket = var.zappa_s3_bucket } module "database" { diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 63de21b355..403c12d699 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -13,27 +13,22 @@ terraform { } } -# S3 Bucket for Zappa Deployments resource "aws_s3_bucket" "zappa" { # NOSONAR - bucket = var.zappa_s3_bucket - - tags = { + bucket = var.zappa_s3_bucket + force_destroy = var.force_destroy_bucket + tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-zappa-deployments" - } - force_destroy = true + }) } -# Block public access resource "aws_s3_bucket_public_access_block" "zappa" { - bucket = aws_s3_bucket.zappa.id - block_public_acls = true block_public_policy = true + bucket = aws_s3_bucket.zappa.id ignore_public_acls = true restrict_public_buckets = true } -# Enable versioning resource "aws_s3_bucket_versioning" "zappa" { bucket = aws_s3_bucket.zappa.id @@ -42,7 +37,6 @@ resource "aws_s3_bucket_versioning" "zappa" { } } -# Server-side encryption resource "aws_s3_bucket_server_side_encryption_configuration" "zappa" { bucket = aws_s3_bucket.zappa.id @@ -53,43 +47,39 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "zappa" { } } -# Lifecycle rule to clean up old versions resource "aws_s3_bucket_lifecycle_configuration" "zappa" { bucket = aws_s3_bucket.zappa.id rule { - id = "delete-old-versions" - status = "Enabled" - - noncurrent_version_expiration { - noncurrent_days = 30 - } - 
abort_incomplete_multipart_upload { - days_after_initiation = 7 + days_after_initiation = var.abort_incomplete_multipart_upload_days } + id = "delete-old-versions" + noncurrent_version_expiration { + noncurrent_days = var.noncurrent_version_expiration_days + } + status = "Enabled" } } -# Enforce HTTPS-only access data "aws_iam_policy_document" "zappa" { statement { - sid = "EnforceTls" + actions = ["s3:*"] + condition { + test = "Bool" + variable = "aws:SecureTransport" + values = ["false"] + } effect = "Deny" principals { type = "*" identifiers = ["*"] } - actions = ["s3:*"] resources = [ aws_s3_bucket.zappa.arn, "${aws_s3_bucket.zappa.arn}/*", ] - condition { - test = "Bool" - variable = "aws:SecureTransport" - values = ["false"] - } + sid = "EnforceTls" } } diff --git a/infrastructure/modules/storage/variables.tf b/infrastructure/modules/storage/variables.tf index 472e4d71cc..f4c9e8c20d 100644 --- a/infrastructure/modules/storage/variables.tf +++ b/infrastructure/modules/storage/variables.tf @@ -1,8 +1,32 @@ +variable "abort_incomplete_multipart_upload_days" { + description = "Specifies the number of days after which an incomplete multipart upload is aborted." + type = number + default = 7 +} + +variable "common_tags" { + description = "A map of common tags to apply to all resources." + type = map(string) + default = {} +} + variable "environment" { description = "The environment (e.g., staging, production)" type = string } +variable "force_destroy_bucket" { + description = "If true, deletes all objects from the bucket when the bucket is destroyed." + type = bool + default = false +} + +variable "noncurrent_version_expiration_days" { + description = "Specifies the number of days an object is noncurrent before it is expired." 
+ type = number + default = 30 +} + variable "project_name" { description = "The name of the project" type = string diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf index ed17d4c244..229effede8 100644 --- a/infrastructure/variables.tf +++ b/infrastructure/variables.tf @@ -136,7 +136,11 @@ variable "db_backup_retention_period" { default = 7 } -# Environment Variables (temporary) +variable "force_destroy_bucket" { + description = "If true, deletes all objects from the bucket when the bucket is destroyed." + type = bool + default = false +} variable "django_algolia_application_id" { type = string description = "Algolia application ID." From 11651661302eb0738850f192f7b3d640bd2e3a36 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 22:46:30 +0530 Subject: [PATCH 19/35] Update docs --- infrastructure/README.md | 52 ++++++++++++---------------------------- 1 file changed, 15 insertions(+), 37 deletions(-) diff --git a/infrastructure/README.md b/infrastructure/README.md index 26568c6f38..2e10662777 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -1,57 +1,35 @@ # Infrastructure -This document provides instructions on how to manage the infrastructure for this project using Terraform and Zappa. +This document provides instructions on how to setup the infrastructure for this project. -## Terraform +## Prerequisites +Ensure you have the following setup/installed: -### Prerequisites - -- Terraform +- Setup Project: [CONTRIBUTING.md](https://github.com/OWASP/Nest/blob/main/CONTRIBUTING.md) +- Terraform: [Terraform Documentation](https://developer.hashicorp.com/terraform/docs) - An AWS account with credentials configured locally. -### Usage - -1. **Initialize Terraform:** - - ```bash - terraform init - ``` - -2. **Plan the changes:** +## Setting up the infrastructure +Follow these steps to set up the infrastructure: - ```bash - terraform plan - ``` +1. **Change the Directory**: -3. 
**Apply the changes:** + - Change the directory using the following command: - ```bash - terraform apply - ``` + ```bash + cd infrastructure/ + ``` -### Variables +*Note*: The following steps assume the current working directory is `infrastructure/` You can override the default values by creating a `terraform.tfvars` file in the `infrastructure/` directory. # TODO: Provide an example terraform.tfvars with important vars -### Outputs - -Get the output values using the `terraform output` command. These outputs will be used for Zappa configuration. - - -```bash -terraform output -``` - -```bash -terraform output -raw db_password redis_auth_token -``` - -## Zappa Deployment +## Setting up Zappa Deployment -The Django backend deployment is managed by Zappa, this also includes the API Gateway, IAM roles, and Lambda Function provision. +The Django backend deployment is managed by Zappa. This includes the API Gateway, IAM roles, and Lambda Function provision. ### Install poetry dependencies From cc8029242553f0e0731292d7f2dc84be1cd15327 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Thu, 23 Oct 2025 23:43:05 +0530 Subject: [PATCH 20/35] add S3 support for load-data task --- infrastructure/main.tf | 95 ++++---- infrastructure/modules/ecs/main.tf | 7 +- infrastructure/modules/ecs/variables.tf | 10 + infrastructure/modules/storage/main.tf | 123 +++++++---- infrastructure/modules/storage/outputs.tf | 10 + infrastructure/modules/storage/variables.tf | 5 + infrastructure/variables.tf | 231 ++++++++++---------- 7 files changed, 285 insertions(+), 196 deletions(-) diff --git a/infrastructure/main.tf b/infrastructure/main.tf index 91bb3d6dd2..34f3cce420 100644 --- a/infrastructure/main.tf +++ b/infrastructure/main.tf @@ -41,6 +41,54 @@ locals { } } +module "cache" { + source = "./modules/cache" + + common_tags = local.common_tags + environment = var.environment + project_name = var.project_name + redis_auth_token = var.redis_auth_token + redis_engine_version = 
var.redis_engine_version + redis_node_type = var.redis_node_type + redis_num_cache_nodes = var.redis_num_cache_nodes + redis_port = var.redis_port + security_group_ids = [module.security.redis_sg_id] + subnet_ids = module.networking.private_subnet_ids +} + +module "database" { + source = "./modules/database" + + common_tags = local.common_tags + db_allocated_storage = var.db_allocated_storage + db_backup_retention_period = var.db_backup_retention_period + db_engine_version = var.db_engine_version + db_instance_class = var.db_instance_class + db_name = var.db_name + db_password = var.db_password + db_storage_type = var.db_storage_type + db_subnet_ids = module.networking.private_subnet_ids + db_username = var.db_username + environment = var.environment + project_name = var.project_name + proxy_security_group_ids = [module.security.rds_proxy_sg_id] + security_group_ids = [module.security.rds_sg_id] +} + +module "ecs" { + source = "./modules/ecs" + + aws_region = var.aws_region + common_tags = local.common_tags + django_environment_variables = local.django_environment_variables + environment = var.environment + fixtures_read_only_policy_arn = module.storage.fixtures_read_only_policy_arn + fixtures_s3_bucket = var.fixtures_s3_bucket + lambda_sg_id = module.security.lambda_sg_id + private_subnet_ids = module.networking.private_subnet_ids + project_name = var.project_name +} + module "networking" { source = "./modules/networking" @@ -69,53 +117,8 @@ module "storage" { common_tags = local.common_tags environment = var.environment + fixtures_s3_bucket = var.fixtures_s3_bucket force_destroy_bucket = var.force_destroy_bucket project_name = var.project_name zappa_s3_bucket = var.zappa_s3_bucket } - -module "database" { - source = "./modules/database" - - common_tags = local.common_tags - db_allocated_storage = var.db_allocated_storage - db_backup_retention_period = var.db_backup_retention_period - db_engine_version = var.db_engine_version - db_instance_class = 
var.db_instance_class - db_name = var.db_name - db_password = var.db_password - db_storage_type = var.db_storage_type - db_subnet_ids = module.networking.private_subnet_ids - db_username = var.db_username - environment = var.environment - project_name = var.project_name - proxy_security_group_ids = [module.security.rds_proxy_sg_id] - security_group_ids = [module.security.rds_sg_id] -} - -module "cache" { - source = "./modules/cache" - - common_tags = local.common_tags - environment = var.environment - project_name = var.project_name - redis_auth_token = var.redis_auth_token - redis_engine_version = var.redis_engine_version - redis_node_type = var.redis_node_type - redis_num_cache_nodes = var.redis_num_cache_nodes - redis_port = var.redis_port - security_group_ids = [module.security.redis_sg_id] - subnet_ids = module.networking.private_subnet_ids -} - -module "ecs" { - source = "./modules/ecs" - - aws_region = var.aws_region - common_tags = local.common_tags - django_environment_variables = local.django_environment_variables - environment = var.environment - lambda_sg_id = module.security.lambda_sg_id - private_subnet_ids = module.networking.private_subnet_ids - project_name = var.project_name -} diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index 8c59bb649b..058b2e75e7 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -42,6 +42,11 @@ resource "aws_iam_role_policy_attachment" "ecs_tasks_execution_role_policy" { policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy" } +resource "aws_iam_role_policy_attachment" "ecs_tasks_fixtures_s3_access" { + role = aws_iam_role.ecs_tasks_execution_role.name + policy_arn = var.fixtures_read_only_policy_arn +} + resource "aws_iam_role" "event_bridge_role" { name = "${var.project_name}-${var.environment}-event-bridge-role" tags = var.common_tags @@ -151,7 +156,7 @@ module "load_data_task" { source = "./modules/task" 
aws_region = var.aws_region - command = ["python", "manage.py", "load_data"] + command = ["/bin/sh", "-c", "aws s3 cp s3://${var.fixtures_s3_bucket}/nest.json.gz /data/nest.json.gz && python manage.py load_data --file /data/nest.json.gz"] common_tags = var.common_tags container_environment = var.django_environment_variables cpu = var.load_data_task_cpu diff --git a/infrastructure/modules/ecs/variables.tf b/infrastructure/modules/ecs/variables.tf index c8dffa7925..b4ab964f83 100644 --- a/infrastructure/modules/ecs/variables.tf +++ b/infrastructure/modules/ecs/variables.tf @@ -21,6 +21,16 @@ variable "environment" { type = string } +variable "fixtures_read_only_policy_arn" { + description = "The ARN of the fixtures read-only IAM policy" + type = string +} + +variable "fixtures_s3_bucket" { + description = "The name of the S3 bucket for fixtures" + type = string +} + variable "index_data_task_cpu" { description = "The CPU for the index-data task" type = string diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 403c12d699..99a80fd028 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -13,6 +13,65 @@ terraform { } } +data "aws_iam_policy_document" "zappa" { + statement { + actions = ["s3:*"] + condition { + test = "Bool" + variable = "aws:SecureTransport" + values = ["false"] + } + effect = "Deny" + principals { + type = "*" + identifiers = ["*"] + } + resources = [ + aws_s3_bucket.zappa.arn, + "${aws_s3_bucket.zappa.arn}/*", + ] + sid = "EnforceTls" + } +} + +resource "aws_iam_policy" "fixtures_read_only" { + name = "${var.project_name}-${var.environment}-fixtures-read-only" + description = "Allows read-only access to the fixtures S3 bucket" + + policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = [ + "s3:GetObject" + ] + Effect = "Allow" + Resource = "arn:aws:s3:::${var.fixtures_s3_bucket}/*" + } + ] + }) +} + +resource "aws_s3_bucket" "fixtures" { 
+ bucket = var.fixtures_s3_bucket + tags = var.common_tags +} + +resource "aws_s3_bucket_lifecycle_configuration" "zappa" { + bucket = aws_s3_bucket.zappa.id + + rule { + abort_incomplete_multipart_upload { + days_after_initiation = var.abort_incomplete_multipart_upload_days + } + id = "delete-old-versions" + noncurrent_version_expiration { + noncurrent_days = var.noncurrent_version_expiration_days + } + status = "Enabled" + } +} + resource "aws_s3_bucket" "zappa" { # NOSONAR bucket = var.zappa_s3_bucket force_destroy = var.force_destroy_bucket @@ -21,6 +80,19 @@ resource "aws_s3_bucket" "zappa" { # NOSONAR }) } +resource "aws_s3_bucket_policy" "zappa" { + bucket = aws_s3_bucket.zappa.id + policy = data.aws_iam_policy_document.zappa.json +} + +resource "aws_s3_bucket_public_access_block" "fixtures" { + block_public_acls = true + block_public_policy = true + bucket = aws_s3_bucket.fixtures.id + ignore_public_acls = true + restrict_public_buckets = true +} + resource "aws_s3_bucket_public_access_block" "zappa" { block_public_acls = true block_public_policy = true @@ -29,11 +101,13 @@ resource "aws_s3_bucket_public_access_block" "zappa" { restrict_public_buckets = true } -resource "aws_s3_bucket_versioning" "zappa" { - bucket = aws_s3_bucket.zappa.id +resource "aws_s3_bucket_server_side_encryption_configuration" "fixtures" { + bucket = aws_s3_bucket.fixtures.id - versioning_configuration { - status = "Enabled" + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "AES256" + } } } @@ -47,43 +121,18 @@ resource "aws_s3_bucket_server_side_encryption_configuration" "zappa" { } } -resource "aws_s3_bucket_lifecycle_configuration" "zappa" { - bucket = aws_s3_bucket.zappa.id +resource "aws_s3_bucket_versioning" "fixtures" { + bucket = aws_s3_bucket.fixtures.id - rule { - abort_incomplete_multipart_upload { - days_after_initiation = var.abort_incomplete_multipart_upload_days - } - id = "delete-old-versions" - noncurrent_version_expiration { - noncurrent_days = 
var.noncurrent_version_expiration_days - } + versioning_configuration { status = "Enabled" } } -data "aws_iam_policy_document" "zappa" { - statement { - actions = ["s3:*"] - condition { - test = "Bool" - variable = "aws:SecureTransport" - values = ["false"] - } - effect = "Deny" - principals { - type = "*" - identifiers = ["*"] - } - resources = [ - aws_s3_bucket.zappa.arn, - "${aws_s3_bucket.zappa.arn}/*", - ] - sid = "EnforceTls" - } -} - -resource "aws_s3_bucket_policy" "zappa" { +resource "aws_s3_bucket_versioning" "zappa" { bucket = aws_s3_bucket.zappa.id - policy = data.aws_iam_policy_document.zappa.json + + versioning_configuration { + status = "Enabled" + } } diff --git a/infrastructure/modules/storage/outputs.tf b/infrastructure/modules/storage/outputs.tf index 79d46ece85..7adfc4ecfa 100644 --- a/infrastructure/modules/storage/outputs.tf +++ b/infrastructure/modules/storage/outputs.tf @@ -1,3 +1,13 @@ +output "fixtures_read_only_policy_arn" { + description = "The ARN of the fixtures read-only IAM policy" + value = aws_iam_policy.fixtures_read_only.arn +} + +output "fixtures_s3_bucket_arn" { + description = "The ARN of the S3 bucket for fixtures" + value = aws_s3_bucket.fixtures.arn +} + output "zappa_s3_bucket_arn" { description = "The ARN of the S3 bucket for Zappa deployments" value = aws_s3_bucket.zappa.arn diff --git a/infrastructure/modules/storage/variables.tf b/infrastructure/modules/storage/variables.tf index f4c9e8c20d..7999d8cb10 100644 --- a/infrastructure/modules/storage/variables.tf +++ b/infrastructure/modules/storage/variables.tf @@ -15,6 +15,11 @@ variable "environment" { type = string } +variable "fixtures_s3_bucket" { + description = "The name of the S3 bucket for fixtures" + type = string +} + variable "force_destroy_bucket" { description = "If true, deletes all objects from the bucket when the bucket is destroyed." 
type = bool diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf index 229effede8..0178147cad 100644 --- a/infrastructure/variables.tf +++ b/infrastructure/variables.tf @@ -7,7 +7,7 @@ variable "aws_region" { variable "availability_zones" { description = "A list of availability zones for the VPC" type = list(string) - default = ["us-east-1a", "us-east-1b"] + default = ["us-east-1a", "us-east-1b", "us-east-1c"] } variable "db_allocated_storage" { @@ -16,6 +16,12 @@ variable "db_allocated_storage" { default = 20 } +variable "db_backup_retention_period" { + description = "The number of days to retain backups for" + type = number + default = 7 +} + variable "db_engine_version" { description = "The version of the PostgreSQL engine" type = string @@ -31,7 +37,7 @@ variable "db_instance_class" { variable "db_name" { description = "The name of the RDS database" type = string - default = "nestdb" + default = "owasp_nest" } variable "db_password" { @@ -47,121 +53,79 @@ variable "db_port" { default = 5432 } -variable "db_username" { - description = "The username for the RDS database" +variable "db_storage_type" { + description = "The storage type for the RDS database" type = string - default = "nestuser" + default = "gp3" } -variable "environment" { - description = "The environment (e.g., staging, production)" +variable "db_username" { + description = "The username for the RDS database" type = string - default = "staging" - validation { - condition = contains(["staging", "production"], var.environment) - error_message = "Environment must be either 'staging' or 'production'." 
- } + default = "owasp_nest_user" } -variable "private_subnet_cidrs" { - description = "A list of CIDR blocks for the private subnets" - type = list(string) - default = ["10.0.3.0/24", "10.0.4.0/24"] -} - -variable "project_name" { - description = "The name of the project" +variable "django_algolia_application_id" { type = string - default = "nest" -} - -variable "public_subnet_cidrs" { - description = "A list of CIDR blocks for the public subnets" - type = list(string) - default = ["10.0.1.0/24", "10.0.2.0/24"] + description = "Algolia application ID." + default = null } -variable "redis_auth_token" { - description = "The auth token for Redis" +variable "django_algolia_write_api_key" { type = string + description = "Algolia write API key." sensitive = true default = null } -variable "redis_engine_version" { - description = "The version of the Redis engine" - type = string - default = "7.0" -} - -variable "redis_node_type" { - description = "The node type for the Redis cache" +variable "django_allowed_hosts" { type = string - default = "cache.t3.micro" -} - -variable "redis_num_cache_nodes" { - description = "The number of cache nodes in the Redis cluster" - type = number - default = 1 -} - -variable "redis_port" { - description = "The port for the Redis cache" - type = number - default = 6379 + description = "Comma-separated list of allowed hosts for Django." + default = null } -variable "vpc_cidr" { - description = "The CIDR block for the VPC" +variable "django_aws_access_key_id" { type = string - default = "10.0.0.0/16" + description = "AWS access key for Django." + sensitive = true + default = null } -variable "zappa_s3_bucket" { - description = "The name of the S3 bucket for Zappa deployments" +variable "django_aws_secret_access_key" { type = string - default = "nest-zappa-deployments" + description = "AWS secret access key for Django." 
+ sensitive = true + default = null } -variable "db_storage_type" { - description = "The storage type for the RDS database" +variable "django_configuration" { type = string - default = "gp3" -} - -variable "db_backup_retention_period" { - description = "The number of days to retain backups for" - type = number - default = 7 + description = "Django Configuration" + default = null } -variable "force_destroy_bucket" { - description = "If true, deletes all objects from the bucket when the bucket is destroyed." - type = bool - default = false -} -variable "django_algolia_application_id" { +variable "django_db_host" { type = string - description = "Algolia application ID." + description = "Database host URL." default = null } -variable "django_allowed_hosts" { +variable "django_db_name" { type = string - description = "Comma-separated list of allowed hosts for Django." + description = "Database name." default = null } -variable "django_db_host" { +variable "django_db_password" { type = string - description = "Database host URL." + description = "Database password." + sensitive = true default = null } -variable "django_db_name" { +variable "django_db_port" { type = string - description = "Database name." + description = "Database port." default = null } @@ -171,9 +135,10 @@ variable "django_db_user" { default = null } -variable "django_db_port" { +variable "django_open_ai_secret_key" { type = string - description = "Database port." + description = "OpenAI secret key." + sensitive = true default = null } @@ -183,78 +148,120 @@ variable "django_redis_host" { default = null } -variable "django_algolia_write_api_key" { +variable "django_redis_password" { type = string - description = "Algolia write API key." + description = "Redis password." sensitive = true default = null } -variable "django_aws_access_key_id" { +variable "django_secret_key" { type = string - description = "AWS access key for Django." + description = "Django secret key." 
sensitive = true default = null } -variable "django_aws_secret_access_key" { +variable "django_sentry_dsn" { type = string - description = "AWS secret access key for Django." + description = "Sentry DSN for error tracking." sensitive = true default = null } -variable "django_configuration" { +variable "django_slack_bot_token" { type = string - description = "Django Configuration" + description = "Slack bot token." + sensitive = true default = null } -variable "django_db_password" { +variable "django_slack_signing_secret" { type = string - description = "Database password." + description = "Slack signing secret." sensitive = true default = null } -variable "django_open_ai_secret_key" { +variable "environment" { + description = "The environment (e.g., staging, production)" type = string - description = "OpenAI secret key." - sensitive = true - default = null + default = "staging" + validation { + condition = contains(["staging", "production"], var.environment) + error_message = "Environment must be either 'staging' or 'production'." + } } -variable "django_redis_password" { +variable "force_destroy_bucket" { + description = "If true, deletes all objects from the bucket when the bucket is destroyed." + type = bool + default = false +} + +variable "fixtures_s3_bucket" { + description = "The name of the S3 bucket for fixtures" type = string - description = "Redis password." - sensitive = true - default = null + default = "nest-fixtures" } -variable "django_secret_key" { +variable "private_subnet_cidrs" { + description = "A list of CIDR blocks for the private subnets" + type = list(string) + default = ["10.0.11.0/24", "10.0.12.0/24", "10.0.13.0/24"] +} + +variable "project_name" { + description = "The name of the project" type = string - description = "Django secret key." 
- sensitive = true - default = null + default = "nest" } -variable "django_sentry_dsn" { +variable "public_subnet_cidrs" { + description = "A list of CIDR blocks for the public subnets" + type = list(string) + default = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"] +} + +variable "redis_auth_token" { + description = "The auth token for Redis" type = string - description = "Sentry DSN for error tracking." sensitive = true default = null } -variable "django_slack_bot_token" { +variable "redis_engine_version" { + description = "The version of the Redis engine" type = string - description = "Slack bot token." - sensitive = true - default = null + default = "7.0" } -variable "django_slack_signing_secret" { +variable "redis_node_type" { + description = "The node type for the Redis cache" type = string - description = "Slack signing secret." - sensitive = true - default = null + default = "cache.t3.micro" +} + +variable "redis_num_cache_nodes" { + description = "The number of cache nodes in the Redis cluster" + type = number + default = 1 +} + +variable "redis_port" { + description = "The port for the Redis cache" + type = number + default = 6379 +} + +variable "vpc_cidr" { + description = "The CIDR block for the VPC" + type = string + default = "10.0.0.0/16" +} + +variable "zappa_s3_bucket" { + description = "The name of the S3 bucket for Zappa deployments" + type = string + default = "owasp-nest-zappa-deployments" } From c694c57441c748987968d1c43703dd0e62fcf6c5 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 00:44:03 +0530 Subject: [PATCH 21/35] Update README and necessary examples --- backend/zappa_settings.example.json | 8 +- infrastructure/README.md | 191 ++++++++++++++++++---- infrastructure/modules/storage/outputs.tf | 5 + infrastructure/outputs.tf | 5 + infrastructure/terraform.tfvars.example | 23 +++ infrastructure/variables.tf | 2 +- 6 files changed, 197 insertions(+), 37 deletions(-) create mode 100644 
infrastructure/terraform.tfvars.example diff --git a/backend/zappa_settings.example.json b/backend/zappa_settings.example.json index e20dd34623..7e35e81c4b 100644 --- a/backend/zappa_settings.example.json +++ b/backend/zappa_settings.example.json @@ -28,11 +28,11 @@ "s3_bucket": "${ZAPPA_S3_BUCKET}", "slim_handler": true, "vpc_config": { - "SecurityGroupIds": ["${AWS_VPC_SECURITY_GROUP}"], + "SecurityGroupIds": ["${AWS_LAMBDA_VPC_SECURITY_GROUP_ID}"], "SubnetIds": [ - "${AWS_VPC_SUBNET_A}", - "${AWS_VPC_SUBNET_B}", - "${AWS_VPC_SUBNET_C}" + "${AWS_PRIVATE_SUBNET_A}", + "${AWS_PRIVATE_SUBNET_B}", + "${AWS_PRIVATE_SUBNET_C}" ] } } diff --git a/infrastructure/README.md b/infrastructure/README.md index 2e10662777..b8d3ef6e69 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -1,5 +1,6 @@ # Infrastructure + This document provides instructions on how to setup the infrastructure for this project. ## Prerequisites @@ -19,42 +20,114 @@ Follow these steps to set up the infrastructure: ```bash cd infrastructure/ ``` + *Note*: The following steps assume the current working directory is `infrastructure/` + +2. **Create Variables File**: + + - Create a local variables file in the `infrastructure` directory: + + ```bash + touch terraform.tfvars + ``` + + - Copy the contents from the template file into your new local environment file: + + ```bash + cat terraform.tfvars.example > terraform.tfvars + ``` + - Update the default `django_` prefixed variables. (database/redis credentials will be added later) -*Note*: The following steps assume the current working directory is `infrastructure/` +3. **Apply Changes**: + + - Apply the changes and create the infrastructure using the following command: + + ```bash + terraform apply + ``` + +4. **Copy Outputs**: + + - Run the following command to view all outputs. Use the `-raw` flag for sensitive outputs. + - Copy required outputs (i.e. 
`database_endpoint`, `db_password`, `redis_auth_token`, and `redis_endpoint`) + to the previously created `terraform.tfvars`: + + ```bash + terraform output + ``` + Example Output: + ```bash + database_endpoint = "owasp-nest-staging-proxy.proxy-000000000000.ap-south-1.rds.amazonaws.com" + db_password = + ecr_repository_url = "000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend" + lambda_security_group_id = "sg-00000000000000000" + private_subnet_ids = [ + "subnet-00000000000000000", + "subnet-11111111111111111", + "subnet-22222222222222222", + ] + redis_auth_token = + redis_endpoint = "master.owasp-nest-staging-cache.aaaaaa.aps1.cache.amazonaws.com" + zappa_s3_bucket = "owasp-nest-zappa-deployments" + ``` + ```bash + terraform output -raw db_password + ``` + ```bash + terraform output -raw redis_auth_token + ``` -You can override the default values by creating a `terraform.tfvars` file in the `infrastructure/` directory. +5. **Apply The Changes Again**: -# TODO: Provide an example terraform.tfvars with important vars + - Apply the changes again using the following command: + ```bash + terraform apply + ``` +*Note*: Step 4 and 5 ensure that ECS/Fargate tasks have proper environment variables. +These two steps will be removed when AWS Secrets Manager is integrated. -## Setting up Zappa Deployment +## Setting up Zappa The Django backend deployment is managed by Zappa. This includes the API Gateway, IAM roles, and Lambda Function provision. -### Install poetry dependencies +1. **Change Directory**: -1. **Install dependencies using Poetry:** + - Change the directory to `backend/` using the following command: - ```bash - poetry install - ``` + ```bash + cd ../backend/ + ``` + *Note*: The following steps assume the current working directory is `backend/` -2. **Activate the virtual environment:** +2. **Setup Dependencies**: - ```bash - eval $(poetry env activate) - ``` + - This step may differ for different operating systems. 
+    - The goal is to install dependencies listed in `pyproject.toml`.
+    - Steps for Linux:
+      ```bash
+      poetry install && eval $(poetry env activate)
+      ```
 
-3. **Create a `zappa_settings.json` file:**
+3. **Create Zappa Settings File**:
 
-   ```bash
-   cp zappa_settings.example.json zappa_settings.json
-   ```
+    - Create a local Zappa settings file in the `backend` directory:
+
+      ```bash
+      touch zappa_settings.json
+      ```
+
+    - Copy the contents from the template file into your new local environment file:
+
+      ```bash
+      cat zappa_settings.example.json > zappa_settings.json
+      ```
+
+4. **Populate Settings File**:
+
+    - Replace all `${...}` variables in `zappa_settings.json` with appropriate output variables.
 
-Replace all variables in the copied `zappa_settings.json` with appropriate secrets.
-# TODO: explain this step
 
-4. **Deploy staging:**
+5. **Deploy**:
 
     ```bash
     zappa deploy staging
@@ -62,22 +135,76 @@ Replace all variables in the copied `zappa_settings.json` with appropriate secre
 
 Once deployed, Zappa will provide you with a URL. You can use this URL to test the API.
 
-### Updating
+## Setup Database
 
-After making necessary changes, you may run the following command to update the deployment.
+Migrate and load data into the new database.
 
-```bash
-zappa update staging
-```
+1. **Setup ECR Image**:
+    - Login to the Elastic Container Registry using the following command:
 
-### Cleaning Up
+      *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID.
+      *TODO*: Add warning to configure a credential helper instead of using the following command.
 
-To delete the deployment, you can use the following command:
+      ```bash
+      aws ecr get-login-password --region ap-south-1 | docker login --username AWS --password-stdin 000000000000.dkr.ecr.ap-south-1.amazonaws.com
+      ```
 
-```bash
-zappa undeploy local
-```
+    - Build the backend image using the following command:
+
+      ```bash
+      docker build -t owasp-nest-staging-backend:latest -f docker/Dockerfile . 
+ ``` + + - Tag the image: + *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. + ```bash + docker tag owasp-nest-staging-backend:latest 000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend:latest + ``` + + - Push the image: + *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. + ```bash + docker push 000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend:latest + ``` + +2. **Upload Fixture to S3**: + - Upload the fixture present in `backend/data` to `nest-fixtures` bucket using the following command: -Then run this command to destroy the terraform infrastructure: + ```bash + aws s3 cp data/nest.json.gz s3://nest-fixtures/ + ``` +3. **Run ECS Tasks**: + - Head over to Elastic Container Service in the AWS Console. + - Click on `owasp-nest-staging-migrate` in `Task Definitions` section. + - Click Deploy > Run Task. + - Use the following configuration: + - Task details + - Task definition revision: LATEST + - Networking: + - VPC: owasp-nest-staging-vpc + - Security group name: select all with `owasp-nest-staging-` prefix. + (*Note*: temporary step, will be further improved) + - Click "Create" + - The task is now running... Click on the task ID to view Logs, Status, etc. + - Follow the same steps for `owasp-nest-staging-load-data` and `owasp-nest-staging-index-data`. 
+ +## Cleaning Up + + - To delete the deployment use the following command: + + ```bash + zappa undeploy local + ``` + + - To destroy Terraform infrastructure: + + ```bash + terraform destroy + ``` + +## Helpful Commands +To update a Zappa `staging` deployment run: ```bash -terraform destroy +zappa update staging ``` diff --git a/infrastructure/modules/storage/outputs.tf b/infrastructure/modules/storage/outputs.tf index 7adfc4ecfa..707b4d324e 100644 --- a/infrastructure/modules/storage/outputs.tf +++ b/infrastructure/modules/storage/outputs.tf @@ -8,6 +8,11 @@ output "fixtures_s3_bucket_arn" { value = aws_s3_bucket.fixtures.arn } +output "zappa_s3_bucket" { + description = "The name of the S3 bucket for Zappa deployments" + value = aws_s3_bucket.zappa.bucket +} + output "zappa_s3_bucket_arn" { description = "The ARN of the S3 bucket for Zappa deployments" value = aws_s3_bucket.zappa.arn diff --git a/infrastructure/outputs.tf b/infrastructure/outputs.tf index 2c2724cdd2..dc2e0a9ba9 100644 --- a/infrastructure/outputs.tf +++ b/infrastructure/outputs.tf @@ -34,3 +34,8 @@ output "ecr_repository_url" { description = "The URL of the ECR repository" value = module.ecs.ecr_repository_url } + +output "zappa_s3_bucket" { + description = "The name of the S3 bucket for Zappa deployments" + value = module.storage.zappa_s3_bucket +} diff --git a/infrastructure/terraform.tfvars.example b/infrastructure/terraform.tfvars.example new file mode 100644 index 0000000000..d6096d422a --- /dev/null +++ b/infrastructure/terraform.tfvars.example @@ -0,0 +1,23 @@ +availability_zones = ["ap-south-1a", "ap-south-1b", "ap-south-1c"] +aws_region = "ap-south-1" +django_algolia_application_id = "" +django_algolia_write_api_key = "" +django_allowed_hosts = ".execute-api.ap-south-1.amazonaws.com" +django_aws_access_key_id = "" +django_aws_secret_access_key = "" +django_configuration = "Staging" +django_db_host = "" +django_db_name = "owasp_nest" +django_db_user = "owasp_nest_db_user" +django_db_port 
= "5432" +django_db_password = "" +django_open_ai_secret_key = "" +django_redis_host = "" +django_redis_password = "" +django_secret_key = "" +django_sentry_dsn = "" +django_slack_bot_token = "" +django_slack_signing_secret = "" +environment = "staging" +force_destroy_bucket = true +project_name = "owasp-nest" diff --git a/infrastructure/variables.tf b/infrastructure/variables.tf index 0178147cad..77fb8aa747 100644 --- a/infrastructure/variables.tf +++ b/infrastructure/variables.tf @@ -62,7 +62,7 @@ variable "db_storage_type" { variable "db_username" { description = "The username for the RDS database" type = string - default = "owasp_nest_user" + default = "owasp_nest_db_user" } variable "django_algolia_application_id" { From 753e234e783e384598b7146c7f85fbb096c08c6f Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:06:49 +0530 Subject: [PATCH 22/35] coderabbit suggestions --- infrastructure/modules/ecs/main.tf | 6 +++++- infrastructure/modules/ecs/variables.tf | 10 +++++----- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index 058b2e75e7..8c8f786b12 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -16,7 +16,11 @@ resource "aws_ecs_cluster" "main" { resource "aws_ecr_repository" "main" { name = "${var.project_name}-${var.environment}-backend" - tags = var.common_tags + image_scanning_configuration { + scan_on_push = true + } + image_tag_mutability = "IMMUTABLE" + tags = var.common_tags } resource "aws_iam_role" "ecs_tasks_execution_role" { diff --git a/infrastructure/modules/ecs/variables.tf b/infrastructure/modules/ecs/variables.tf index b4ab964f83..5133bf9b77 100644 --- a/infrastructure/modules/ecs/variables.tf +++ b/infrastructure/modules/ecs/variables.tf @@ -61,13 +61,13 @@ variable "load_data_task_memory" { } variable "migrate_task_cpu" { - description = "The CPU for the load-data task" + description = 
"The CPU for the migrate task" type = string default = "256" } variable "migrate_task_memory" { - description = "The memory for the load-data task" + description = "The memory for the migrate task" type = string default = "2048" } @@ -91,7 +91,7 @@ variable "sync_data_task_cpu" { variable "sync_data_task_memory" { description = "The memory for the sync-data task" type = string - default = "512" + default = "2048" } variable "update_project_health_metrics_task_cpu" { @@ -103,7 +103,7 @@ variable "update_project_health_metrics_task_cpu" { variable "update_project_health_metrics_task_memory" { description = "The memory for the update-project-health-metrics task" type = string - default = "512" + default = "2048" } variable "update_project_health_scores_task_cpu" { @@ -115,5 +115,5 @@ variable "update_project_health_scores_task_cpu" { variable "update_project_health_scores_task_memory" { description = "The memory for the update-project-health-scores task" type = string - default = "512" + default = "2048" } From 16445dc1e8b2deab2a62d4d8748a3a7c7fd67c25 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:23:54 +0530 Subject: [PATCH 23/35] Update code: install awscli before invoking aws in task --- infrastructure/modules/ecs/main.tf | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index 8c8f786b12..8c50312419 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -159,8 +159,17 @@ module "migrate_task" { module "load_data_task" { source = "./modules/task" - aws_region = var.aws_region - command = ["/bin/sh", "-c", "aws s3 cp s3://${var.fixtures_s3_bucket}/nest.json.gz /data/nest.json.gz && python manage.py load_data --file /data/nest.json.gz"] + aws_region = var.aws_region + command = [ + "/bin/sh", + "-c", + <<-EOT + set -e + pip install --quiet awscli + aws s3 cp 
s3://${var.fixtures_s3_bucket}/nest.json.gz /tmp/nest.json.gz + python manage.py load_data --file /tmp/nest.json.gz + EOT + ] common_tags = var.common_tags container_environment = var.django_environment_variables cpu = var.load_data_task_cpu From 536e615d439aff9007619d74d6ddf622e51cccc7 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:25:32 +0530 Subject: [PATCH 24/35] update README --- infrastructure/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/README.md b/infrastructure/README.md index b8d3ef6e69..908aa5369b 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -194,7 +194,7 @@ Migrate and load data into the new database. - To delete the deployment use the following command: ```bash - zappa undeploy local + zappa undeploy staging ``` - To destroy Terraform infrastructure: From 206e60e0f1bbc00d3a91e3bd6477f808321c3dc3 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:26:47 +0530 Subject: [PATCH 25/35] NOSONAR --- infrastructure/modules/storage/main.tf | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/infrastructure/modules/storage/main.tf b/infrastructure/modules/storage/main.tf index 99a80fd028..e0c29ee8cd 100644 --- a/infrastructure/modules/storage/main.tf +++ b/infrastructure/modules/storage/main.tf @@ -52,7 +52,7 @@ resource "aws_iam_policy" "fixtures_read_only" { }) } -resource "aws_s3_bucket" "fixtures" { +resource "aws_s3_bucket" "fixtures" { # NOSONAR bucket = var.fixtures_s3_bucket tags = var.common_tags } From 748ed52ad195b9a106b2b7f44de265e75be624c0 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:36:26 +0530 Subject: [PATCH 26/35] update README --- infrastructure/README.md | 29 +++++++++++++++-------------- 1 file changed, 15 insertions(+), 14 deletions(-) diff --git a/infrastructure/README.md b/infrastructure/README.md index 908aa5369b..9fb465ea8f 100644 --- a/infrastructure/README.md +++ 
b/infrastructure/README.md @@ -122,12 +122,12 @@ The Django backend deployment is managed by Zappa. This includes the API Gateway cat zappa.settings.example.json > zappa.settings.json ``` -3. **Populate Settings File**: +4. **Populate Settings File**: - Replace all `${...}` variables in `zappa_settings.json` with appropriate output variables. -4. **Deploy**: +5. **Deploy**: ```bash zappa deploy staging @@ -191,20 +191,21 @@ Migrate and load data into the new database. ## Cleaning Up - - To delete the deployment use the following command: +- To delete the deployment use the following command: - ```bash - zappa undeploy staging - ``` + ```bash + zappa undeploy staging + ``` - - To destroy Terraform infrastructure: +- To destroy Terraform infrastructure: - ```bash - terraform destroy - ``` + ```bash + terraform destroy + ``` ## Helpful Commands -To update a Zappa `staging` deployment run: -```bash -zappa update staging -``` +- To update a Zappa `staging` deployment run: + + ```bash + zappa update staging + ``` From f71861c811d79140249db7a0f3a256c20a4f1098 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 01:39:19 +0530 Subject: [PATCH 27/35] fix spell check --- cspell/custom-dict.txt | 1 + infrastructure/README.md | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/cspell/custom-dict.txt b/cspell/custom-dict.txt index fd0744b8c7..7acc1d25a1 100644 --- a/cspell/custom-dict.txt +++ b/cspell/custom-dict.txt @@ -54,6 +54,7 @@ csrftoken cva demojize dismissable +dkr dsn env facebookexternalhit diff --git a/infrastructure/README.md b/infrastructure/README.md index 9fb465ea8f..c5b502c51b 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -66,7 +66,7 @@ Follow these steps to set up the infrastructure: "subnet-22222222222222222", ] redis_auth_token = - redis_endpoint = "master.owasp-nest-staging-cache.aaaaaa.aps1.cache.amazonaws.com" + redis_endpoint = 
"master.owasp-nest-staging-cache.aaaaaa.region1.cache.amazonaws.com" zappa_s3_bucket = "owasp-nest-zappa-deployments" ``` ```bash From e785a42905b90a8eac8f231d63269c0d4f503150 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 20:26:26 +0530 Subject: [PATCH 28/35] Update README --- infrastructure/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/README.md b/infrastructure/README.md index c5b502c51b..70c1bd222b 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -1,6 +1,5 @@ # Infrastructure - This document provides instructions on how to setup the infrastructure for this project. ## Prerequisites @@ -143,7 +142,8 @@ Migrate and load data into the new database. - Login to the Elastic Container Registry using the following command: *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. - *TODO*: Add warning to configure a credential helper instead of using following command. + + *Warning*: Configure a credential helper instead of using following command to login. ```bash aws ecr get-login-password --region ap-south-1 | docker login --username AWS --password-stdin 000000000000.dkr.ecr.ap-south-1.amazonaws.com From bc3737e320a4af2bc94a19fe3252ea82817fbafa Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 20:32:40 +0530 Subject: [PATCH 29/35] fix: typo --- infrastructure/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/infrastructure/README.md b/infrastructure/README.md index 70c1bd222b..324506ae3c 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -112,13 +112,13 @@ The Django backend deployment is managed by Zappa. 
This includes the API Gateway - Create a local Zappa settings file in the `backend` directory: ```bash - touch zappa.settings.json + touch zappa_settings.json ``` - Copy the contents from the template file into your new local environment file: ```bash - cat zappa.settings.example.json > zappa.settings.json + cat zappa_settings.example.json > zappa_settings.json ``` 4. **Populate Settings File**: From e51d34bf4cd38d574a9962ba2738795d92e8cd5b Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 20:40:28 +0530 Subject: [PATCH 30/35] remove immutable tagged images --- infrastructure/modules/ecs/main.tf | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index 8c50312419..e4f1c2369d 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -19,8 +19,7 @@ resource "aws_ecr_repository" "main" { image_scanning_configuration { scan_on_push = true } - image_tag_mutability = "IMMUTABLE" - tags = var.common_tags + tags = var.common_tags } resource "aws_iam_role" "ecs_tasks_execution_role" { From d4190736f0d1eb391fae011bfeb5fba9e52344ac Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 21:12:20 +0530 Subject: [PATCH 31/35] fix: load-data task fail due to no permissions --- infrastructure/modules/ecs/main.tf | 30 +++++++++++++++---- .../modules/ecs/modules/task/main.tf | 1 + .../modules/ecs/modules/task/variables.tf | 6 ++++ 3 files changed, 32 insertions(+), 5 deletions(-) diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index e4f1c2369d..b4707b3982 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -45,8 +45,26 @@ resource "aws_iam_role_policy_attachment" "ecs_tasks_execution_role_policy" { policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy" } -resource "aws_iam_role_policy_attachment" 
"ecs_tasks_fixtures_s3_access" { - role = aws_iam_role.ecs_tasks_execution_role.name +resource "aws_iam_role" "ecs_task_role" { + name = "${var.project_name}-${var.environment}-ecs-task-role" + tags = var.common_tags + + assume_role_policy = jsonencode({ + Version = "2012-10-17" + Statement = [ + { + Action = "sts:AssumeRole" + Effect = "Allow" + Principal = { + Service = "ecs-tasks.amazonaws.com" + } + } + ] + }) +} + +resource "aws_iam_role_policy_attachment" "ecs_task_role_fixtures_s3_access" { + role = aws_iam_role.ecs_task_role.name policy_arn = var.fixtures_read_only_policy_arn } @@ -164,9 +182,10 @@ module "load_data_task" { "-c", <<-EOT set -e - pip install --quiet awscli - aws s3 cp s3://${var.fixtures_s3_bucket}/nest.json.gz /tmp/nest.json.gz - python manage.py load_data --file /tmp/nest.json.gz + pip install --target=/tmp/awscli-packages awscli + export PYTHONPATH="/tmp/awscli-packages:$PYTHONPATH" + python /tmp/awscli-packages/bin/aws s3 cp s3://${var.fixtures_s3_bucket}/nest.json.gz /tmp/nest.json.gz + python manage.py loaddata /tmp/nest.json.gz -v 3 EOT ] common_tags = var.common_tags @@ -181,6 +200,7 @@ module "load_data_task" { project_name = var.project_name security_group_ids = [var.lambda_sg_id] task_name = "load-data" + task_role_arn = aws_iam_role.ecs_task_role.arn } module "index_data_task" { diff --git a/infrastructure/modules/ecs/modules/task/main.tf b/infrastructure/modules/ecs/modules/task/main.tf index a85ce0889a..4f43fecd3d 100644 --- a/infrastructure/modules/ecs/modules/task/main.tf +++ b/infrastructure/modules/ecs/modules/task/main.tf @@ -24,6 +24,7 @@ resource "aws_ecs_task_definition" "task" { cpu = var.cpu memory = var.memory execution_role_arn = var.ecs_tasks_execution_role_arn + task_role_arn = var.task_role_arn tags = merge(var.common_tags, { Name = "${var.project_name}-${var.environment}-${var.task_name}-task-def" }) diff --git a/infrastructure/modules/ecs/modules/task/variables.tf 
b/infrastructure/modules/ecs/modules/task/variables.tf index 7c0a86c23e..85e7ce9b06 100644 --- a/infrastructure/modules/ecs/modules/task/variables.tf +++ b/infrastructure/modules/ecs/modules/task/variables.tf @@ -87,3 +87,9 @@ variable "task_name" { description = "The unique name of the task." type = string } + +variable "task_role_arn" { + description = "The ARN of the IAM role for the task." + type = string + default = null +} From a347969b59ab56ca7986dda9652123e7c3ef3d13 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Fri, 24 Oct 2025 21:14:42 +0530 Subject: [PATCH 32/35] fix spell check: add PYTHONPATH --- cspell/custom-dict.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/cspell/custom-dict.txt b/cspell/custom-dict.txt index 7acc1d25a1..656a6baae7 100644 --- a/cspell/custom-dict.txt +++ b/cspell/custom-dict.txt @@ -19,6 +19,7 @@ NOSONAR Nadu Nominatim PLR +PYTHONPATH PYTHONUNBUFFERED RUF SEO From 88af956fee59d177d14e03e42cc5282eea6cda86 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Sat, 25 Oct 2025 00:23:10 +0530 Subject: [PATCH 33/35] add AWS CLI --- infrastructure/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/infrastructure/README.md b/infrastructure/README.md index 324506ae3c..c1f4dd07a5 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -7,6 +7,7 @@ Ensure you have the following setup/installed: - Setup Project: [CONTRIBUTING.md](https://github.com/OWASP/Nest/blob/main/CONTRIBUTING.md) - Terraform: [Terraform Documentation](https://developer.hashicorp.com/terraform/docs) +- AWS CLI: [AWS CLI](https://docs.aws.amazon.com/cli/latest/userguide/getting-started-install.html) - An AWS account with credentials configured locally. 
## Setting up the infrastructure From 5be108340ad7926ba69eba9a16c8baf5a2bd2785 Mon Sep 17 00:00:00 2001 From: Rudransh Shrivastava Date: Sat, 25 Oct 2025 00:28:40 +0530 Subject: [PATCH 34/35] add set -e --- infrastructure/modules/ecs/main.tf | 1 + 1 file changed, 1 insertion(+) diff --git a/infrastructure/modules/ecs/main.tf b/infrastructure/modules/ecs/main.tf index b4707b3982..2ba8c19d3c 100644 --- a/infrastructure/modules/ecs/main.tf +++ b/infrastructure/modules/ecs/main.tf @@ -211,6 +211,7 @@ module "index_data_task" { "/bin/sh", "-c", <<-EOT + set -e python manage.py algolia_reindex python manage.py algolia_update_replicas python manage.py algolia_update_synonyms From 9650e5e5658a20aa07d728b5095244d48e0f5d0e Mon Sep 17 00:00:00 2001 From: Arkadii Yakovets Date: Sat, 25 Oct 2025 17:02:11 -0700 Subject: [PATCH 35/35] Update code --- .gitignore | 10 ++--- infrastructure/README.md | 49 +++++++++++++++++-------- infrastructure/terraform.tfvars.example | 6 +-- 3 files changed, 42 insertions(+), 23 deletions(-) diff --git a/.gitignore b/.gitignore index 8c93455b22..4c69638274 100644 --- a/.gitignore +++ b/.gitignore @@ -17,9 +17,6 @@ __pycache__ .python_history .python-version .ruff_cache -**/.terraform/ -*.tfstate -*.tfstate.* .venv/ .vscode *.code-workspace @@ -27,10 +24,13 @@ __pycache__ *.log *.pdf *.pem -backend/*nest-backend-dev*.zip +*.tfstate +*.tfstate.* +**/.terraform/ backend/*nest-backend-dev*.tar.gz -backend/*nest-backend-staging*.zip +backend/*nest-backend-dev*.zip backend/*nest-backend-staging*.tar.gz +backend/*nest-backend-staging*.zip backend/data/backup* backend/staticfiles backend/zappa_settings.json diff --git a/infrastructure/README.md b/infrastructure/README.md index c1f4dd07a5..2c6feade93 100644 --- a/infrastructure/README.md +++ b/infrastructure/README.md @@ -3,6 +3,7 @@ This document provides instructions on how to setup the infrastructure for this project. 
## Prerequisites + Ensure you have the following setup/installed: - Setup Project: [CONTRIBUTING.md](https://github.com/OWASP/Nest/blob/main/CONTRIBUTING.md) @@ -11,6 +12,7 @@ Ensure you have the following setup/installed: - An AWS account with credentials configured locally. ## Setting up the infrastructure + Follow these steps to set up the infrastructure: 1. **Change the Directory**: @@ -20,6 +22,7 @@ Follow these steps to set up the infrastructure: ```bash cd infrastructure/ ``` + *Note*: The following steps assume the current working directory is `infrastructure/` 2. **Create Variables File**: @@ -35,10 +38,17 @@ Follow these steps to set up the infrastructure: ```bash cat terraform.tfvars.example > terraform.tfvars ``` + - Update the default `django_` prefixed variables. (database/redis credentials will be added later) 3. **Apply Changes**: + - Init terraform if needed: + + ```bash + terraform init + ``` + - Apply the changes and create the infrastructure using the following command: ```bash @@ -54,11 +64,13 @@ Follow these steps to set up the infrastructure: ```bash terraform output ``` + Example Output: + ```bash - database_endpoint = "owasp-nest-staging-proxy.proxy-000000000000.ap-south-1.rds.amazonaws.com" + database_endpoint = "owasp-nest-staging-proxy.proxy-000000000000.us-east-2.rds.amazonaws.com" db_password = - ecr_repository_url = "000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend" + ecr_repository_url = "000000000000.dkr.ecr.us-east-2.amazonaws.com/owasp-nest-staging-backend" lambda_security_group_id = "sg-00000000000000000" private_subnet_ids = [ "subnet-00000000000000000", @@ -69,9 +81,11 @@ Follow these steps to set up the infrastructure: redis_endpoint = "master.owasp-nest-staging-cache.aaaaaa.region1.cache.amazonaws.com" zappa_s3_bucket = "owasp-nest-zappa-deployments" ``` + ```bash terraform output -raw db_password ``` + ```bash terraform output -raw redis_auth_token ``` @@ -83,6 +97,7 @@ Follow these steps to set up 
the infrastructure: ```bash terraform apply ``` + *Note*: Step 4 and 5 ensure that ECS/Fargate tasks have proper environment variables. These two steps will be removed when AWS Secrets Manager is integrated. @@ -97,6 +112,7 @@ The Django backend deployment is managed by Zappa. This includes the API Gateway ```bash cd ../backend/ ``` + *Note*: The following steps assume the current working directory is `backend/` 2. **Setup Dependencies**: @@ -104,30 +120,30 @@ The Django backend deployment is managed by Zappa. This includes the API Gateway - This step may differ for different operating systems. - The goal is to install dependencies listed in `pyproject.toml`. - Steps for Linux: + ```bash poetry install && eval $(poetry env activate) ``` -3. **Create Zappa Settings File**: +3. **Create Zappa Settings File**: - - Create a local Zappa settings file in the `backend` directory: +- Create a local Zappa settings file in the `backend` directory: ```bash touch zappa_settings.json ``` - - Copy the contents from the template file into your new local environment file: +- Copy the contents from the template file into your new local environment file: ```bash cat zappa_settings.example.json > zappa_settings.json ``` -4. **Populate Settings File**: - - - Replace all `${...}` variables in `zappa_settings.json` with appropriate output variables. +4. **Populate Settings File**: +- Replace all `${...}` variables in `zappa_settings.json` with appropriate output variables. -5. **Deploy**: +5. **Deploy**: ```bash zappa deploy staging @@ -142,12 +158,12 @@ Migrate and load data into the new database. 1. **Setup ECR Image**: - Login to the Elastic Container Registry using the following command: - *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. + *Note*: replace `us-east-2` with the configured region and `000000000000` with AWS Account ID. *Warning*: Configure a credential helper instead of using following command to login. 
```bash - aws ecr get-login-password --region ap-south-1 | docker login --username AWS --password-stdin 000000000000.dkr.ecr.ap-south-1.amazonaws.com + aws ecr get-login-password --region us-east-2 | docker login --username AWS --password-stdin 000000000000.dkr.ecr.us-east-2.amazonaws.com ``` - Build the backend image using the following command: @@ -157,15 +173,17 @@ Migrate and load data into the new database. ``` - Tag the image: - *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. + *Note*: replace `us-east-2` with the configured region and `000000000000` with AWS Account ID. + ```bash - docker tag owasp-nest-staging-backend:latest 000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend:latest + docker tag owasp-nest-staging-backend:latest 000000000000.dkr.ecr.us-east-2.amazonaws.com/owasp-nest-staging-backend:latest ``` - Push the image: - *Note*: replace `ap-south-1` with configured region and `000000000000` with AWS Account ID. + *Note*: replace `us-east-2` with the configured region and `000000000000` with AWS Account ID. + ```bash - docker push 000000000000.dkr.ecr.ap-south-1.amazonaws.com/owasp-nest-staging-backend:latest + docker push 000000000000.dkr.ecr.us-east-2.amazonaws.com/owasp-nest-staging-backend:latest ``` 2. **Upload Fixture to S3**: @@ -205,6 +223,7 @@ Migrate and load data into the new database.
``` ## Helpful Commands + - To update a Zappa `staging` deployment run: ```bash diff --git a/infrastructure/terraform.tfvars.example b/infrastructure/terraform.tfvars.example index d6096d422a..a654271016 100644 --- a/infrastructure/terraform.tfvars.example +++ b/infrastructure/terraform.tfvars.example @@ -1,8 +1,8 @@ -availability_zones = ["ap-south-1a", "ap-south-1b", "ap-south-1c"] -aws_region = "ap-south-1" +availability_zones = ["us-east-2a", "us-east-2b", "us-east-2c"] +aws_region = "us-east-2" django_algolia_application_id = "" django_algolia_write_api_key = "" -django_allowed_hosts = ".execute-api.ap-south-1.amazonaws.com" +django_allowed_hosts = ".execute-api.us-east-2.amazonaws.com" django_aws_access_key_id = "" django_aws_secret_access_key = "" django_configuration = "Staging"