Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: Code quality check
on:
push
jobs:
python_quality_check:
run_python_quality_check:
name: Run Python quality checks
runs-on: ubuntu-latest
steps:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ permissions:
issues: write
pull-requests: write
jobs:
stale:
check_stale_items:
name: Close stale issues and pull requests
runs-on: ubuntu-latest
steps:
Expand Down
39 changes: 0 additions & 39 deletions .github/workflows/deploy.yaml

This file was deleted.

21 changes: 21 additions & 0 deletions .github/workflows/launch_application.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
---
# Scheduled workflow: brings the application infrastructure up each weekday
# morning via Pulumi (counterpart of the teardown_application workflow).
name: Launch application
on:
  schedule:
    # GitHub Actions cron runs in UTC and does not track DST:
    # 13:00 UTC is 8:00 AM EST in winter but 9:00 AM EDT in summer.
    - cron: '0 13 * * 1,2,3,4,5'
jobs:
  launch_application:
    name: Launch application on weekday schedule
    runs-on: ubuntu-latest
    environment: pulumi  # GitHub environment named "pulumi" supplies the deploy secret
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Install Flox
        uses: flox/install-flox-action@v2
      - name: Deploy with Pulumi
        uses: flox/activate-action@v1
        env:
          PULUMI_ACCESS_TOKEN: ${{ secrets.PULUMI_ACCESS_TOKEN }}
        with:
          command: mise tasks run infrastructure:up
37 changes: 0 additions & 37 deletions .github/workflows/lifecycle.yaml

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ name: Test and coverage check
on:
push
jobs:
python_test_and_coverage:
run_python_test_and_coverage:
name: Run Python tests
runs-on: ubuntu-latest
steps:
Expand Down
22 changes: 22 additions & 0 deletions .github/workflows/teardown_application.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
---
# Scheduled workflow: tears the application infrastructure down each weekday
# evening via Pulumi (counterpart of the launch workflow).
name: Teardown application
on:
  schedule:
    # GitHub Actions cron runs in UTC and does not track DST:
    # 23:00 UTC is 6:00 PM EST in winter but 7:00 PM EDT in summer.
    - cron: '0 23 * * 1,2,3,4,5'
jobs:
  teardown_application:
    name: Teardown application on weekday schedule
    # NOTE(review): with only one schedule entry this guard always matches;
    # presumably kept so more schedules can be added later — confirm intent.
    if: github.event.schedule == '0 23 * * 1,2,3,4,5'
    runs-on: ubuntu-latest
    environment: pulumi  # GitHub environment named "pulumi" supplies the deploy secret
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Install Flox
        uses: flox/install-flox-action@v2
      - name: Dismantle with Pulumi
        uses: flox/activate-action@v1
        env:
          PULUMI_ACCESS_TOKEN: ${{ secrets.PULUMI_ACCESS_TOKEN }}
        with:
          command: mise tasks run infrastructure:down
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,4 @@ infrastructure/Pulumi.production.yaml
.coverage*
.coverage/
coverage.xml
infrastructure/kubeconfig.json
9 changes: 6 additions & 3 deletions .mise.toml
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
[env]
COMPOSE_BAKE=true
COMPOSE_BAKE = true

[tasks."python:install"]
description = "Install Python dependencies"
run = "uv sync --all-packages"
run = "uv sync --all-packages --all-groups"
Comment thread
forstmeier marked this conversation as resolved.
Comment thread
forstmeier marked this conversation as resolved.

[tasks."python:format"]
description = "Format Python code"
Expand Down Expand Up @@ -87,11 +87,14 @@ yamllint -d "{extends: relaxed, rules: {line-length: {max: 110}}}" .

[tasks."infrastructure:up"]
description = "Launch cloud infrastructure"
# temporarily comment out ping test due to non-exposed endpoints
run = """
set -e
cd infrastructure
uv run pulumi up --yes --stack pocketsizefund/pocketsizefund/production
nu ping.nu
pulumi stack output KUBECONFIG > kubeconfig.json
export KUBECONFIG=<(pulumi stack output KUBECONFIG)
# nu ping.nu
"""

[tasks."infrastructure:down"]
Expand Down
7 changes: 2 additions & 5 deletions application/datamanager/features/steps/equity_bars_steps.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,14 @@
import os
import sys
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent.parent))

import requests
from behave import given, then, when
from behave import given, then, when # type: ignore
from behave.runner import Context


@given("I have date ranges")
def step_impl_date_ranges(context: Context) -> None:
for row in context.table: # ty: ignore
for row in context.table: # type: ignore
context.start_date = row["start_date"]
context.end_date = row["end_date"]

Expand Down
2 changes: 1 addition & 1 deletion application/datamanager/features/steps/health_steps.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import requests
from behave import when
from behave import when # type: ignore
from behave.runner import Context


Expand Down
7 changes: 4 additions & 3 deletions application/datamanager/pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -10,10 +10,11 @@ dependencies = [
"polars>=1.29.0",
"pyarrow>=20.0.0",
"loguru>=0.7.3",
"google-cloud-storage>=2.16.0",
"httpx>=0.28.1",
"requests>=2.31.0",
"prometheus-fastapi-instrumentator>=7.1.0",
"loguru>=0.7.3",
"cloudevents>=1.12.0",
"polygon-api-client>=1.14.6",
"boto3>=1.38.23",
]

[tool.hatch.build.targets.wheel]
Expand Down
45 changes: 45 additions & 0 deletions application/datamanager/src/datamanager/clients.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,45 @@
from typing import cast

import boto3
from polygon import RESTClient
from polygon.rest.models.aggs import GroupedDailyAgg


class PolygonClient:
    """Thin wrapper around the Polygon REST API for bulk equity-bar retrieval."""

    def __init__(self, polygon_api_key: str) -> None:
        """Initialize the underlying Polygon REST client with the given API key."""
        self.polygon_client = RESTClient(api_key=polygon_api_key)

    def get_all_equity_bars(self, date: str) -> list[GroupedDailyAgg]:
        """Fetch split-adjusted daily bars for every ticker on *date*.

        ``date`` is passed straight through to the Polygon grouped-daily
        endpoint (presumably ``YYYY-MM-DD`` — confirm against the Polygon
        client documentation).
        """
        aggregates = self.polygon_client.get_grouped_daily_aggs(
            date=date,
            adjusted=True,
        )
        # The upstream return type is loosely typed; narrow it for callers.
        return cast("list[GroupedDailyAgg]", aggregates)


class S3Client:
    """Thin wrapper around the AWS S3 client for the fund's data bucket."""

    def __init__(self, data_bucket_name: str) -> None:
        """Create an S3 client bound to *data_bucket_name*."""
        self.s3_client = boto3.client("s3")
        self.data_bucket_name = data_bucket_name
        # Base S3 URI under which daily equity bar objects live.
        self.daily_equity_bars_path = f"s3://{self.data_bucket_name}/equity/bars/"

    def list_objects(self, prefix: str = "") -> list[str]:
        """Return every object key in the bucket whose key starts with *prefix*.

        Uses the ``list_objects_v2`` paginator so results beyond the
        1000-key page size are all included.
        """
        objects: list[str] = []
        paginator = self.s3_client.get_paginator("list_objects_v2")

        for page in paginator.paginate(Bucket=self.data_bucket_name, Prefix=prefix):
            # "Contents" is absent from pages with no matching objects.
            objects.extend(obj["Key"] for obj in page.get("Contents", []))

        return objects

    def delete_objects(self, object_names: list[str]) -> None:
        """Delete the named objects from the bucket; no-op for an empty list.

        The S3 ``DeleteObjects`` API accepts at most 1000 keys per request,
        so larger inputs are sent in batches of 1000 (the original single
        call would fail outright for >1000 keys).
        """
        if not object_names:
            return

        max_batch = 1000  # hard per-request limit of the S3 DeleteObjects API
        for start in range(0, len(object_names), max_batch):
            batch = object_names[start : start + max_batch]
            self.s3_client.delete_objects(
                Bucket=self.data_bucket_name,
                Delete={"Objects": [{"Key": name} for name in batch]},
            )
47 changes: 0 additions & 47 deletions application/datamanager/src/datamanager/config.py

This file was deleted.

Loading