Merge pull request #47 from igh9410/dev
Configured Model Server, Nginx and restored Postgres data
igh9410 authored May 5, 2024
2 parents dad4ef4 + 3468bc8 commit 08c4bd8
Showing 26 changed files with 2,283 additions and 2 deletions.
3 changes: 3 additions & 0 deletions .gitignore
@@ -0,0 +1,3 @@
./database/*
.env
.env.*
45 changes: 43 additions & 2 deletions backend/Makefile
@@ -1,7 +1,48 @@
include .env
export

# Makefile

# Run the application
run:
@echo "Running the application..."
# @cd cmd/mvp && go run main.go
@go run cmd/blabber-hive/main.go

# Build the application using Docker
docker-build:
@echo "Generating Swagger documentation before building the application..."
@swag init -g cmd/blabber-hive/main.go
@echo "Building the application using Docker..."
@./scripts/docker-build.sh

# Run the application using Docker Compose
docker-run:
@echo "Running the application using Docker Compose..."
@./scripts/docker-run.sh

# Run golangci-lint
linter:
golangci-lint run

# Create database migration file
create-migration:
@cd ./db/migrations && goose create $(filter-out $@,$(MAKECMDGOALS)) sql
@echo "Migration created."

goose-version:
@goose -dir db/migrations postgres "host=localhost user=$$POSTGRES_USERNAME password=$$POSTGRES_PASSWORD dbname=postgres sslmode=disable port=5432" version

# Run the database migrations
migrate-up:
@echo "Running the database migrations..."
@goose -dir db/migrations postgres "host=localhost user=$$POSTGRES_USERNAME password=$$POSTGRES_PASSWORD dbname=postgres sslmode=disable port=5432" up

# Rollback the database migrations
migrate-down:
@echo "Rolling back the database migrations..."
@goose -dir db/migrations postgres "host=localhost user=$$POSTGRES_USERNAME password=$$POSTGRES_PASSWORD dbname=postgres sslmode=disable port=5432" down

# Run the tests
test:
@echo "Running the tests..."
@go test ./... -v -cover -coverprofile=coverage.out
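
The goose targets above read POSTGRES_USERNAME and POSTGRES_PASSWORD from the file pulled in by include .env at the top of the Makefile. A minimal local .env might look like the sketch below (placeholder values, not part of this commit; .env is already covered by the new root .gitignore):

POSTGRES_USERNAME=postgres
POSTGRES_PASSWORD=changeme
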
Empty file modified backend/db/create-tables.sql
100644 → 100755
Empty file.
16 changes: 16 additions & 0 deletions backend/db/migrations/20240504131541_create_users_in_chat.sql
@@ -0,0 +1,16 @@
-- +goose Up
-- +goose StatementBegin

SELECT 'ALTER TABLE ' || tc.table_name || ' DROP CONSTRAINT ' || tc.constraint_name || ';'
FROM information_schema.table_constraints AS tc
JOIN information_schema.key_column_usage AS kcu
ON tc.constraint_name = kcu.constraint_name
JOIN information_schema.constraint_column_usage AS ccu
ON ccu.constraint_name = tc.constraint_name
WHERE constraint_type = 'FOREIGN KEY' AND tc.table_schema='public';
-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin
SELECT 'ALTER TABLE ' || tc.table_name || ' ADD CONSTRAINT ' || tc.constraint_name || ' FOREIGN KEY (' || kcu.column_name || ') REFERENCES ' || ccu.table_name || '(' || ccu.column_name || ');'
FROM information_schema.table_constraints AS tc
JOIN information_schema.key_column_usage AS kcu
  ON tc.constraint_name = kcu.constraint_name
JOIN information_schema.constraint_column_usage AS ccu
  ON ccu.constraint_name = tc.constraint_name
WHERE constraint_type = 'FOREIGN KEY' AND tc.table_schema='public';
-- +goose StatementEnd
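
As committed, the Up statement only returns the generated ALTER TABLE text as query results; nothing executes the generated DDL. A hedged sketch of an Up step that actually drops the foreign keys, using a PL/pgSQL DO block placed between the goose StatementBegin/StatementEnd markers (an illustration, not part of this commit):

-- Illustration only: build and execute each DROP CONSTRAINT statement.
DO $$
DECLARE
    stmt text;
BEGIN
    FOR stmt IN
        SELECT 'ALTER TABLE ' || tc.table_name
               || ' DROP CONSTRAINT ' || tc.constraint_name
        FROM information_schema.table_constraints AS tc
        WHERE tc.constraint_type = 'FOREIGN KEY'
          AND tc.table_schema = 'public'
    LOOP
        EXECUTE stmt;
    END LOOP;
END $$;
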
125 changes: 125 additions & 0 deletions docker-compose.yml
@@ -0,0 +1,125 @@
services:
blabber-hive:
image: igh9410/blabber-hive:v1
container_name: blabber-hive
ports:
- "8080:8080"
depends_on:
- postgres
- redis
- broker
- zookeeper
env_file:
- backend/.env.docker
environment:
- REDIS_URL=redis://redis:6379
- KAFKA_BROKER_URL=broker:9092
- TZ=Asia/Seoul
networks:
- blabber-hive

fastapi:
container_name: sentiment-analysis-server
ports:
- "8000:8000"
image: sentiment-analysis:v1
environment:
- TZ=Asia/Seoul
networks:
- blabber-hive

nginx:
container_name: nginx
image: nginx:latest
ports:
- "8001:80"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf
depends_on:
- blabber-hive
- fastapi
networks:
- blabber-hive

zookeeper: # ZooKeeper for Kafka
image: confluentinc/cp-zookeeper:7.4.3
container_name: blabber-hive-zookeeper
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
networks:
- blabber-hive

broker: # Kafka Broker
image: confluentinc/cp-kafka:7.4.3
container_name: blabber-hive-broker
ports:
- "9092:9092"
depends_on:
- zookeeper
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: "zookeeper:2181"
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_INTERNAL:PLAINTEXT
KAFKA_LISTENERS: PLAINTEXT://0.0.0.0:9092,PLAINTEXT_INTERNAL://0.0.0.0:29092
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_INTERNAL://broker:29092
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
volumes:
- ./wait-for-it.sh:/wait-for-it.sh
#command:
# ["/wait-for-it.sh", "zookeeper:2181", "--", "/etc/confluent/docker/run"]
networks:
- blabber-hive

postgres: # Postgres Container
image: postgres:15.5-alpine
container_name: blabber-hive-postgres
ports:
- "5432:5432"
networks:
- blabber-hive
env_file:
- backend/.env.docker
volumes:
- ./database:/var/lib/postgresql/data

redis: # Redis Container
image: redis:7.2.3-alpine3.18
container_name: blabber-hive-redis
ports:
- "6379:6379"
networks:
- blabber-hive
kafka-setup:
image: confluentinc/cp-kafka:latest
container_name: blabber-hive-kafka-setup
depends_on:
- broker
volumes:
- ./backend/create-kafka-topics.sh:/tmp/create-kafka-topics.sh
command: "/tmp/create-kafka-topics.sh"
networks:
- blabber-hive

prometheus:
image: prom/prometheus
container_name: blabber-hive-prometheus
ports:
- "9090:9090"
volumes:
- ./backend/prometheus.yml:/etc/prometheus/prometheus.yml
networks:
- blabber-hive
grafana:
image: grafana/grafana
container_name: blabber-hive-grafana
ports:
- "3001:3000"
networks:
- blabber-hive

networks:
blabber-hive:
driver: bridge
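
The nginx service above mounts ./nginx.conf, which is not shown in this diff. A hypothetical sketch of such a config, reverse-proxying to the two application services on the compose network, follows; only the service names and ports come from the compose file, and the route paths are assumptions:

events {}

http {
    server {
        listen 80;

        # Assumed routing: Go backend at /, sentiment-analysis model server at /sentiment/
        location / {
            proxy_pass http://blabber-hive:8080;
        }

        location /sentiment/ {
            proxy_pass http://fastapi:8000/;
        }
    }
}
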
44 changes: 44 additions & 0 deletions fastapi-server/.gitignore
@@ -0,0 +1,44 @@
# If you prefer the allow list template instead of the deny list, see community template:
# https://github.com/github/gitignore/blob/main/community/Golang/Go.AllowList.gitignore
#
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib

# Test binary, built with `go test -c`
*.test

# Output of the go coverage tool, specifically when used with LiteIDE
*.out

# Dependency directories (remove the comment below to include it)
# vendor/

# Go workspace file
go.work

# Python bytecode
*.py[cod]
# Python cache
__pycache__/
*.pyc
# Virtual environments
.env
.venv
env/
venv/
# Editor directories and files
.vscode/
*.sublime-project
*.sublime-workspace
# Other Python files
*.egg-info/
*.egg
dist/
build/

.github/*
terraform/*
56 changes: 56 additions & 0 deletions fastapi-server/Makefile
@@ -0,0 +1,56 @@
# Makefile

# Run the application
run:
@echo "Running the application..."
@poetry run uvicorn app.main:app --reload

# Build the application using Docker
docker-build:
@echo "Building the application using Docker..."
@./scripts/docker-build.sh

# Run the application using Docker Compose
docker-run:
@echo "Running the application using Docker Compose..."
@./scripts/docker-run.sh

# Run flake8 linter
lint:
@echo "Running flake8 linter..."
@poetry run flake8 app/

# Run black code formatter
format:
@echo "Running black code formatter..."
@poetry run black app/

# Run the tests
test:
@echo "Running the tests..."
@poetry run pytest tests/ -v --cov=app --cov-report=term-missing

# Generate coverage report
coverage:
@echo "Generating coverage report..."
@poetry run coverage html

# Create a new migration file using Alembic
create-migration:
@echo "Creating a new migration file..."
@poetry run alembic revision --autogenerate -m "$(filter-out $@,$(MAKECMDGOALS))"

# Run the database migrations
migrate:
@echo "Running the database migrations..."
@poetry run alembic upgrade head

# Rollback the last database migration
migrate-rollback:
@echo "Rolling back the last database migration..."
@poetry run alembic downgrade -1

# Generate API documentation using Sphinx
docs:
@echo "Generating API documentation..."
@cd docs && poetry run make html
Empty file added fastapi-server/README.md
Empty file.
Empty file added fastapi-server/app/__init__.py
Empty file.
Empty file added fastapi-server/app/db.py
Empty file.
28 changes: 28 additions & 0 deletions fastapi-server/app/main.py
@@ -0,0 +1,28 @@
# main.py
"""
The main module contains the FastAPI application instance
and the configuration for the application.
It imports the sentiment_router from the sentiment module
and includes it in the application using the include_router method.
The main module also defines a root route
that returns a simple message when accessed.
"""

from fastapi import FastAPI
from app.sentiment.routes import sentiment_router


app = FastAPI()
app.include_router(sentiment_router, prefix="/api")

@app.get("/")
async def root():
    """The root route returns a simple message when accessed."""
    return {"message": "Hello World"}


if __name__ == "__main__":
    import uvicorn

    # Register routes before starting the server when the module is run
    # directly; uvicorn.run blocks until shutdown, so anything defined
    # after it would never be reached in that mode.
    uvicorn.run(app, host="0.0.0.0", port=8000)
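
A minimal smoke test for the root route, in the spirit of the pytest target in the fastapi-server Makefile above (this test file is not part of the commit, and it assumes FastAPI's TestClient dependencies are installed):

# tests/test_main.py (hypothetical)
from fastapi.testclient import TestClient

from app.main import app

client = TestClient(app)


def test_root():
    response = client.get("/")
    assert response.status_code == 200
    assert response.json() == {"message": "Hello World"}
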
Empty file.
41 changes: 41 additions & 0 deletions fastapi-server/app/models/sentiment_model.py
@@ -0,0 +1,41 @@
"""
transformers is a library that provides a wide variety of pre-trained models
for Natural Language Processing (NLP) tasks.
"""

from transformers import AutoModelForSequenceClassification, AutoTokenizer


class SentimentModel:
"""
SentimentModel class is a wrapper around
a pre-trained model for sentiment analysis.
"""

def __init__(
self,
model_name="distilbert-base-uncased-finetuned-sst-2-english",
):
self.model_name = model_name
self.model = AutoModelForSequenceClassification.from_pretrained(
model_name
)
self.tokenizer = AutoTokenizer.from_pretrained(model_name)

def predict_sentiment(self, text):
"""
predict_sentiment method takes a text input and returns
the predicted sentiment.
"""
inputs = self.tokenizer(
text, return_tensors="pt", padding=True, truncation=True
)
outputs = self.model(**inputs)
predicted_class = outputs.logits.argmax().item()

        # The default SST-2 model is binary: class 0 is negative, class 1 is positive.
        if predicted_class == 0:
            return "negative"
        return "positive"
Empty file.
