From 6bc6e80f7c0bb261497e21ab6727546fa606ddce Mon Sep 17 00:00:00 2001 From: reglim Date: Mon, 21 Nov 2022 14:06:44 +0100 Subject: [PATCH] Refactor(docat): Apply Requests from PR, improve Tests Docat now only creates the folders on startup, which has simplified the tests. The Search tests now also test for file results (content and name matches). Models with internal uses were renamed- --- README.md | 26 +-- docat/README.md | 2 +- docat/docat/app.py | 66 +++---- docat/docat/models.py | 7 +- docat/docat/utils.py | 46 +++-- docat/file.txt | 0 docat/tests/conftest.py | 56 +++--- docat/tests/test_hide_show.py | 11 +- docat/tests/test_index.py | 282 +++++++++++++++++++------- docat/tests/test_search.py | 362 +++++++++++++++++++++++++++++++++- docat/tests/test_setup.py | 23 --- docat/tests/test_upload.py | 2 +- docat/tests/test_utils.py | 13 +- 13 files changed, 676 insertions(+), 220 deletions(-) delete mode 100644 docat/file.txt delete mode 100644 docat/tests/test_setup.py diff --git a/README.md b/README.md index 9e62bb10b..77d1d4f06 100644 --- a/README.md +++ b/README.md @@ -13,26 +13,10 @@ you can optionally use volumes to persist state: ```sh # run container in background and persist data (docs, nginx configs and tokens database as well as the content index) # use 'ghcr.io/docat-org/docat:unstable' to get the latest changes -mkdir -p docat-run/db && touch docat-run/db/db.json && touch docat-run/db/index.json +mkdir -p docat-run/ docker run \ --detach \ - --volume $PWD/docat-run/doc:/var/docat/doc/ \ - --volume $PWD/docat-run/db/:/app/docat/ \ - --publish 8000:80 \ - ghcr.io/docat-org/docat -``` - -*Alternative:* Mount a dedicated directory to host `db.json` and `index.json`: - -```sh -# run container in background and persist data (docs, nginx configs and tokens database as well as the content index) -# use 'ghcr.io/docat-org/docat:unstable' to get the latest changes -mkdir -p docat-run/db && touch docat-run/db/db.json && touch docat-run/db/index.json -docker run \ - --detach \ - --volume $PWD/docat-run/doc:/var/docat/doc/ \ - --volume $PWD/docat-run/db:/var/docat/db/ \ - --env DOCAT_DB_DIR=/var/docat/db/ + --volume $PWD/docat-run/doc:/var/docat/ \ --publish 8000:80 \ ghcr.io/docat-org/docat ``` @@ -53,7 +37,7 @@ DEV_DOC_PATH="$(mktemp -d)" poetry install # run the local development version -DOCAT_SERVE_FILES=1 DOCAT_INDEX_FILES=1 DOCAT_DOC_PATH="$DEV_DOC_PATH" poetry run python -m docat +DOCAT_SERVE_FILES=1 DOCAT_DOC_PATH="$DEV_DOC_PATH" poetry run python -m docat ``` After this you need to start the frontend (inside the `web/` folder): @@ -116,13 +100,13 @@ It is possible to configure some things after the fact. Supported config options: -* headerHTML +- headerHTML ## Advanced Usage ### Hide Controls -If you would like to send link to a specific version of the documentation without the option to change the version, you can do so by clicking on the `Hide Controls` button. This will hide the control buttons and change the link, which can then be copied as usual. +If you would like to send link to a specific version of the documentation without the option to change the version, you can do so by clicking on the `Hide Controls` button. This will hide the control buttons and change the link, which can then be copied as usual. 
### Indexing diff --git a/docat/README.md b/docat/README.md index 7222bc1c6..1a220709e 100644 --- a/docat/README.md +++ b/docat/README.md @@ -20,7 +20,7 @@ poetry install * **DOCAT_SERVE_FILES**: Serve static documentation instead of a nginx (for testing) * **DOCAT_INDEX_FILES**: Index files on start for searching -* **DOCAT_DOC_PATH**: Upload directory for static files (needs to match nginx config) +* **DOCAT_STORAGE_PATH**: Upload directory for static files (needs to match nginx config) * **FLASK_DEBUG**: Start flask in debug mode ## Usage diff --git a/docat/docat/app.py b/docat/docat/app.py index e43a2e177..5f6b3f502 100644 --- a/docat/docat/app.py +++ b/docat/docat/app.py @@ -11,7 +11,7 @@ import secrets import shutil from pathlib import Path -from typing import Optional, Tuple +from typing import Optional import magic from fastapi import Depends, FastAPI, File, Header, Response, UploadFile, status @@ -22,8 +22,8 @@ from docat.models import ( ApiResponse, ClaimResponse, - ProjectDetailResponse, - ProjectsResponse, + ProjectDetail, + Projects, SearchResponse, SearchResultFile, SearchResultProject, @@ -56,36 +56,23 @@ redoc_url="/api/redoc", ) -DOCAT_DB_DIR_STR = os.getenv("DOCAT_DB_DIR") +DOCAT_STORAGE_PATH = Path(os.getenv("DOCAT_STORAGE_PATH") or Path("/var/docat")) +DOCAT_DB_PATH = DOCAT_STORAGE_PATH / DB_PATH +DOCAT_INDEX_PATH = DOCAT_STORAGE_PATH / INDEX_PATH +DOCAT_UPLOAD_FOLDER = DOCAT_STORAGE_PATH / UPLOAD_FOLDER -if not DOCAT_DB_DIR_STR: - # Default Database locations - DOCAT_DB_DIR = Path.cwd() - DOCAT_DB_PATH = Path(DB_PATH) - DOCAT_INDEX_PATH = Path(INDEX_PATH) -else: - # Custom Database locations - DOCAT_DB_DIR = Path(DOCAT_DB_DIR_STR) - DOCAT_DB_PATH = DOCAT_DB_DIR / "db.json" - DOCAT_INDEX_PATH = DOCAT_DB_DIR / "index.json" - -DOCAT_DB_DIR.mkdir(parents=True, exist_ok=True) -DOCAT_DB_PATH.touch() -DOCAT_INDEX_PATH.touch() - -#: Holds the static base path where the uploaded documentation artifacts are stored -DOCAT_UPLOAD_FOLDER = Path(os.getenv("DOCAT_DOC_PATH", UPLOAD_FOLDER)) - -if not DOCAT_DB_PATH.exists(): +@app.on_event("startup") +def startup_create_folders(): + # Create the folders if they don't exist DOCAT_UPLOAD_FOLDER.mkdir(parents=True, exist_ok=True) - -db = TinyDB(DOCAT_DB_PATH) + DOCAT_DB_PATH.touch() + DOCAT_INDEX_PATH.touch() def get_db(): """Return the cached TinyDB instance.""" - return db + return TinyDB(DOCAT_DB_PATH) @app.post("/api/index/update", response_model=ApiResponse, status_code=status.HTTP_200_OK) @@ -95,22 +82,22 @@ def update_index(): return ApiResponse(message="Successfully updated search index") -@app.get("/api/projects", response_model=ProjectsResponse, status_code=status.HTTP_200_OK) +@app.get("/api/projects", response_model=Projects, status_code=status.HTTP_200_OK) def get_projects(): if not DOCAT_UPLOAD_FOLDER.exists(): - return ProjectsResponse(projects=[]) + return Projects(projects=[]) return get_all_projects(DOCAT_UPLOAD_FOLDER) @app.get( "/api/projects/{project}", - response_model=ProjectDetailResponse, + response_model=ProjectDetail, status_code=status.HTTP_200_OK, responses={status.HTTP_404_NOT_FOUND: {"model": ApiResponse}}, ) @app.get( "/api/projects/{project}/", - response_model=ProjectDetailResponse, + response_model=ProjectDetail, status_code=status.HTTP_200_OK, responses={status.HTTP_404_NOT_FOUND: {"model": ApiResponse}}, ) @@ -127,14 +114,14 @@ def get_project(project): @app.get("/api/search/", response_model=SearchResponse, status_code=status.HTTP_200_OK) def search(query: str): query = query.lower() - found_projects: 
list[SearchResultProject] = list() - found_versions: list[SearchResultVersion] = list() - found_files: list[SearchResultFile] = list() + found_projects: list[SearchResultProject] = [] + found_versions: list[SearchResultVersion] = [] + found_files: list[SearchResultFile] = [] index_db = TinyDB(DOCAT_INDEX_PATH) project_table = index_db.table("projects") projects = project_table.all() - all_versions: list[Tuple] = list() + all_versions: list[tuple] = [] # Collect all projects that contain the query for project in projects: @@ -271,6 +258,9 @@ def hide_version( with open(hidden_file, "w") as f: f.close() + update_version_index_for_project(DOCAT_UPLOAD_FOLDER, DOCAT_INDEX_PATH, project) + remove_file_index_from_db(DOCAT_INDEX_PATH, project, version) + return ApiResponse(message=f"Version {version} is now hidden") @@ -306,6 +296,9 @@ def show_version( os.remove(hidden_file) + update_version_index_for_project(DOCAT_UPLOAD_FOLDER, DOCAT_INDEX_PATH, project) + update_file_index_for_project_version(DOCAT_UPLOAD_FOLDER, DOCAT_INDEX_PATH, project, version) + return ApiResponse(message=f"Version {version} is now shown") @@ -331,7 +324,7 @@ def upload( if base_path.exists(): token_status = check_token_for_project(db, docat_api_key, project) if token_status.valid: - remove_docs(project, version) + remove_docs(project, version, DOCAT_UPLOAD_FOLDER) else: response.status_code = status.HTTP_401_UNAUTHORIZED return ApiResponse(message=token_status.reason) @@ -441,7 +434,7 @@ def rename(project: str, new_project_name: str, response: Response, docat_api_ke def delete(project: str, version: str, response: Response, docat_api_key: str = Header(None), db: TinyDB = Depends(get_db)): token_status = check_token_for_project(db, docat_api_key, project) if token_status.valid: - message = remove_docs(project, version) + message = remove_docs(project, version, DOCAT_UPLOAD_FOLDER) if message: response.status_code = status.HTTP_404_NOT_FOUND return ApiResponse(message=message) @@ -471,6 +464,7 @@ def check_token_for_project(db, token, project) -> TokenStatus: # serve_local_docs for local testing without a nginx if os.environ.get("DOCAT_SERVE_FILES"): + DOCAT_UPLOAD_FOLDER.mkdir(parents=True, exist_ok=True) app.mount("/doc", StaticFiles(directory=DOCAT_UPLOAD_FOLDER, html=True), name="docs") # index local files on start diff --git a/docat/docat/models.py b/docat/docat/models.py index 0dd903b6f..fd3cd9347 100644 --- a/docat/docat/models.py +++ b/docat/docat/models.py @@ -1,5 +1,4 @@ from dataclasses import dataclass -from typing import Optional from pydantic import BaseModel @@ -7,7 +6,7 @@ @dataclass(frozen=True) class TokenStatus: valid: bool - reason: Optional[str] = None + reason: str | None = None class ApiResponse(BaseModel): @@ -18,7 +17,7 @@ class ClaimResponse(ApiResponse): token: str -class ProjectsResponse(BaseModel): +class Projects(BaseModel): projects: list[str] @@ -27,7 +26,7 @@ class ProjectVersion(BaseModel): tags: list[str] -class ProjectDetailResponse(BaseModel): +class ProjectDetail(BaseModel): name: str versions: list[ProjectVersion] diff --git a/docat/docat/utils.py b/docat/docat/utils.py index 31100b06d..9c2cd0145 100644 --- a/docat/docat/utils.py +++ b/docat/docat/utils.py @@ -11,10 +11,10 @@ from bs4.element import Comment from tinydb import Query, TinyDB -from docat.models import ProjectDetailResponse, ProjectsResponse, ProjectVersion +from docat.models import ProjectDetail, Projects, ProjectVersion NGINX_CONFIG_PATH = Path("/etc/nginx/locations.d") -UPLOAD_FOLDER = Path("/var/docat/doc") 
+UPLOAD_FOLDER = "doc" DB_PATH = "db.json" INDEX_PATH = "index.json" @@ -55,7 +55,7 @@ def extract_archive(target_file, destination): target_file.unlink() # remove the zip file -def remove_docs(project, version): +def remove_docs(project: str, version: str, upload_folder_path: Path): """ Delete documentation @@ -63,7 +63,7 @@ def remove_docs(project, version): project (str): name of the project version (str): project version """ - docs = UPLOAD_FOLDER / project / version + docs = upload_folder_path / project / version if docs.exists(): # remove the requested version # rmtree can not remove a symlink @@ -99,7 +99,7 @@ def calculate_token(password, salt): return hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"), salt, 100000).hex() -def get_all_projects(upload_folder_path: Path): +def get_all_projects(upload_folder_path: Path) -> Projects: """ Returns all projects in the upload folder. """ @@ -110,7 +110,7 @@ def has_not_hidden_versions(project): (path / version).is_dir() and not (path / version / ".hidden").exists() for version in (upload_folder_path / project).iterdir() ) - return ProjectsResponse( + return Projects( projects=list( filter( has_not_hidden_versions, @@ -120,7 +120,7 @@ def has_not_hidden_versions(project): ) -def get_project_details(upload_folder_path: Path, project_name: str): +def get_project_details(upload_folder_path: Path, project_name: str) -> ProjectDetail | None: """ Returns all versions and tags for a project. """ @@ -131,7 +131,7 @@ def get_project_details(upload_folder_path: Path, project_name: str): tags = [x for x in docs_folder.iterdir() if x.is_dir() and x.is_symlink()] - return ProjectDetailResponse( + return ProjectDetail( name=project_name, versions=sorted( [ @@ -157,8 +157,7 @@ def index_all_projects( and save it into index.json. """ # drop existing index - if index_db_path.exists(): - open(index_db_path, "w").close() + index_db_path.unlink(missing_ok=True) all_projects = get_all_projects(upload_folder_path).projects @@ -177,6 +176,9 @@ def update_file_index_for_project(upload_folder_path: Path, index_db_path: Path, project_details = get_project_details(upload_folder_path, project) + if not project_details: + return + for version in project_details.versions: update_file_index_for_project_version(upload_folder_path, index_db_path, project, version.name) @@ -198,7 +200,7 @@ def update_file_index_for_project_version(upload_folder_path: Path, index_db_pat # save the file path path = str(file.relative_to(docs_folder)) - content = get_html_content_as_str(file) if file.name.endswith(".html") else "" + content = get_html_content(file) if file.name.endswith(".html") else "" insert_file_index_into_db(index_db_path, project, version, path, content) @@ -215,11 +217,14 @@ def update_version_index_for_project(upload_folder_path: Path, index_db_path: Pa details = get_project_details(upload_folder_path, project) + if not details: + return + for version in details.versions: insert_version_into_version_index(index_db_path, project, version.name, version.tags) -def get_html_content_as_str(file_path: Path): +def get_html_content(file_path: Path) -> str: """ Returns the content of a html file as a string. 
""" @@ -230,12 +235,11 @@ def html_tag_visible(element): return True - with open(file_path, "r") as f: - file_content = f.read() - soup = BeautifulSoup(file_content, "html.parser") - text_content = filter(html_tag_visible, soup.findAll(string=True)) - content = " ".join(t.strip() for t in text_content).lower() - return content + file_content = file_path.read_text() + soup = BeautifulSoup(file_content, "html.parser") + text_content = filter(html_tag_visible, soup.findAll(string=True)) + content = " ".join(t.strip() for t in text_content).lower() + return content def insert_file_index_into_db(index_db_path: Path, project: str, version: str, file_path: str, content: str): @@ -301,12 +305,12 @@ def remove_version_from_version_index(index_db_path: Path, project: str, version projects_table = index_db.table("projects") Project = Query() - found_project = projects_table.search(Project.name == project)[0] + found_projects = projects_table.search(Project.name == project) - if not found_project: + if not found_projects: return - found_versions = found_project.get("versions") + found_versions = found_projects[0].get("versions") if not found_versions or version not in (v["name"] for v in found_versions): return diff --git a/docat/file.txt b/docat/file.txt deleted file mode 100644 index e69de29bb..000000000 diff --git a/docat/tests/conftest.py b/docat/tests/conftest.py index 42c3bbf13..d3c946c3e 100644 --- a/docat/tests/conftest.py +++ b/docat/tests/conftest.py @@ -4,28 +4,37 @@ import pytest from fastapi.testclient import TestClient from tinydb import TinyDB -from tinydb.storages import MemoryStorage import docat.app as docat from docat.utils import create_symlink -@pytest.fixture -def client(): +@pytest.fixture(autouse=True) +def setup_docat_paths(): + """ + Set up the temporary paths for the docat app. 
+ """ + temp_dir = tempfile.TemporaryDirectory() - docat.DOCAT_UPLOAD_FOLDER = Path(temp_dir.name) + docat.DOCAT_STORAGE_PATH = Path(temp_dir.name) + docat.DOCAT_DB_PATH = Path(temp_dir.name) / "db.json" docat.DOCAT_INDEX_PATH = Path(temp_dir.name) / "index.json" - docat.db = TinyDB(storage=MemoryStorage) - docat.index_db = TinyDB(storage=MemoryStorage) - yield TestClient(docat.app) - docat.app.db = None - docat.app.index_db = None + docat.DOCAT_UPLOAD_FOLDER = Path(temp_dir.name) / "doc" + + yield + temp_dir.cleanup() @pytest.fixture -def upload_folder_path(): - return docat.DOCAT_UPLOAD_FOLDER +def client(): + docat.db = TinyDB(docat.DOCAT_DB_PATH) + docat.index_db = TinyDB(docat.DOCAT_INDEX_PATH) + + yield TestClient(docat.app) + + docat.app.db = None + docat.app.index_db = None @pytest.fixture @@ -37,31 +46,22 @@ def client_with_claimed_project(client): @pytest.fixture -def temp_project_version(tmp_path): - docs = tmp_path / "doc" - - docs.mkdir() - +def temp_project_version(): def __create(project, version): - version_docs = docs / project / version + version_docs = docat.DOCAT_UPLOAD_FOLDER / project / version version_docs.mkdir(parents=True) (version_docs / "index.html").touch() - create_symlink(version_docs, docs / project / "latest") + create_symlink(version_docs, docat.DOCAT_UPLOAD_FOLDER / project / "latest") - return docs + return docat.DOCAT_UPLOAD_FOLDER yield __create @pytest.fixture -def index_db_path(upload_folder_path): - return upload_folder_path / "index.json" - - -@pytest.fixture -def index_db_project_table(index_db_path): - index_db = TinyDB(index_db_path) +def index_db_project_table(): + index_db = TinyDB(docat.DOCAT_INDEX_PATH) projects_table = index_db.table("projects") yield projects_table @@ -70,8 +70,8 @@ def index_db_project_table(index_db_path): @pytest.fixture -def index_db_files_table(index_db_path): - index_db = TinyDB(index_db_path) +def index_db_files_table(): + index_db = TinyDB(docat.DOCAT_INDEX_PATH) projects_table = index_db.table("files") yield projects_table diff --git a/docat/tests/test_hide_show.py b/docat/tests/test_hide_show.py index c8fcb04ea..e92ebfc2a 100644 --- a/docat/tests/test_hide_show.py +++ b/docat/tests/test_hide_show.py @@ -1,7 +1,8 @@ import io -from pathlib import Path from unittest.mock import patch +import docat.app as docat + def test_hide(client_with_claimed_project): """ @@ -70,11 +71,11 @@ def test_hide_only_version_not_listed_in_projects(client_with_claimed_project): assert project_details_response.json() == {"name": "some-project", "versions": []} -def test_hide_creates_hidden_file(client_with_claimed_project, upload_folder_path): +def test_hide_creates_hidden_file(client_with_claimed_project): """ Tests that the hidden file is created when hiding a version """ - hidden_file_path = Path(upload_folder_path) / "some-project" / "1.0.0" / ".hidden" + hidden_file_path = docat.DOCAT_UPLOAD_FOLDER / "some-project" / "1.0.0" / ".hidden" # create a version create_response = client_with_claimed_project.post( @@ -223,11 +224,11 @@ def test_show(client_with_claimed_project): } -def test_show_deletes_hidden_file(client_with_claimed_project, upload_folder_path): +def test_show_deletes_hidden_file(client_with_claimed_project): """ Tests that the hidden file is deleted when requesting show. 
""" - hidden_file_path = Path(upload_folder_path) / "some-project" / "1.0.0" / ".hidden" + hidden_file_path = docat.DOCAT_UPLOAD_FOLDER / "some-project" / "1.0.0" / ".hidden" # create a version create_response = client_with_claimed_project.post( diff --git a/docat/tests/test_index.py b/docat/tests/test_index.py index a38d26299..a61d2984f 100644 --- a/docat/tests/test_index.py +++ b/docat/tests/test_index.py @@ -1,9 +1,9 @@ import io import os import shutil -from pathlib import Path from unittest.mock import patch +import docat.app as docat from docat.utils import ( index_all_projects, insert_file_index_into_db, @@ -16,7 +16,7 @@ ) -def test_insert_file_index_into_db(client_with_claimed_project, index_db_path, index_db_files_table): +def test_insert_file_index_into_db(client_with_claimed_project, index_db_files_table): """ Tests wether insert_file_index_into_db inserts the correct json into the database. @@ -25,12 +25,12 @@ def test_insert_file_index_into_db(client_with_claimed_project, index_db_path, i project = "some-project" version = "1.0.0" - insert_file_index_into_db(index_db_path, project, version, "index.html", "hello world") + insert_file_index_into_db(docat.DOCAT_INDEX_PATH, project, version, "index.html", "hello world") assert index_db_files_table.all() == [{"path": "index.html", "content": "hello world", "project": project, "version": version}] -def test_remove_file_index_from_db(client_with_claimed_project, index_db_path, index_db_files_table): +def test_remove_file_index_from_db(client_with_claimed_project, index_db_files_table): """ Tests wether remove_file_index_from_db removes exactly the json insert_file_index_into_db wrote into the database. @@ -39,13 +39,13 @@ def test_remove_file_index_from_db(client_with_claimed_project, index_db_path, i project = "some-project" version = "1.0.0" - insert_file_index_into_db(index_db_path, project, version, "index.html", "hello world") - remove_file_index_from_db(index_db_path, project, version) + insert_file_index_into_db(docat.DOCAT_INDEX_PATH, project, version, "index.html", "hello world") + remove_file_index_from_db(docat.DOCAT_INDEX_PATH, project, version) assert index_db_files_table.all() == [] -def test_insert_version_into_version_index(client_with_claimed_project, index_db_path, index_db_project_table): +def test_insert_version_into_version_index(client_with_claimed_project, index_db_project_table): """ Tests wether insert_version_into_version_index inserts the correct json into the database. @@ -55,12 +55,12 @@ def test_insert_version_into_version_index(client_with_claimed_project, index_db version = "1.0.0" tag = "latest" - insert_version_into_version_index(index_db_path, project, version, [tag]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [tag]}]}] -def test_insert_version_into_version_index_no_duplicates(client_with_claimed_project, index_db_path, index_db_project_table): +def test_insert_version_into_version_index_no_duplicates(client_with_claimed_project, index_db_project_table): """ Tests wether insert_version_into_version_index doesn't create a new project or version when the version with the same tags already exists. 
@@ -71,13 +71,13 @@ def test_insert_version_into_version_index_no_duplicates(client_with_claimed_pro version = "1.0.0" tag = "latest" - insert_version_into_version_index(index_db_path, project, version, [tag]) - insert_version_into_version_index(index_db_path, project, version, [tag]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [tag]}]}] -def test_insert_version_into_version_index_second(client_with_claimed_project, index_db_path, index_db_project_table): +def test_insert_version_into_version_index_second(client_with_claimed_project, index_db_project_table): """ Tests wether insert_version_into_version_index appends the version when the project already exists. @@ -89,14 +89,14 @@ def test_insert_version_into_version_index_second(client_with_claimed_project, i tags = ["latest", "stable"] for version, tag in zip(versions, tags): - insert_version_into_version_index(index_db_path, project, version, [tag]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) assert index_db_project_table.all() == [ {"name": project, "versions": [{"name": versions[0], "tags": [tags[0]]}, {"name": versions[1], "tags": [tags[1]]}]} ] -def test_insert_version_into_version_index_second_with_different_tags(client_with_claimed_project, index_db_path, index_db_project_table): +def test_insert_version_into_version_index_second_with_different_tags(client_with_claimed_project, index_db_project_table): """ Tests wether insert_version_into_version_index correctly overwrites tags. For example, when a version is tagged as "latest" and then as "stable" and "nightly" , the "latest" tag should be removed. @@ -108,16 +108,16 @@ def test_insert_version_into_version_index_second_with_different_tags(client_wit old_tags = ["latest"] new_tags = ["stale", "nightly"] - insert_version_into_version_index(index_db_path, project, version, [old_tags]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [old_tags]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [old_tags]}]}] - insert_version_into_version_index(index_db_path, project, version, [new_tags]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [new_tags]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [new_tags]}]}] -def test_insert_version_into_version_index_second_with_overlapping_tags(client_with_claimed_project, index_db_path, index_db_project_table): +def test_insert_version_into_version_index_second_with_overlapping_tags(client_with_claimed_project, index_db_project_table): """ Tests wether insert_version_into_version_index correctly overwrites tags. For example, when a version is tagged as "latest" and then as "stable" and "latest", the tags should become "stable" and "latest". 
@@ -129,14 +129,14 @@ def test_insert_version_into_version_index_second_with_overlapping_tags(client_w old_tags = ["latest"] new_tags = ["stable", "latest"] - insert_version_into_version_index(index_db_path, project, version, [old_tags]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [old_tags]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [old_tags]}]}] - insert_version_into_version_index(index_db_path, project, version, [new_tags]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [new_tags]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": version, "tags": [new_tags]}]}] -def test_remove_version_from_version_index(client_with_claimed_project, index_db_path, index_db_project_table): +def test_remove_version_from_version_index(client_with_claimed_project, index_db_project_table): """ Tests that only the version given is removed from the database. @@ -147,19 +147,17 @@ def test_remove_version_from_version_index(client_with_claimed_project, index_db tags = ["latest", "stable"] for version, tag in zip(versions, tags): - insert_version_into_version_index(index_db_path, project, version, [tag]) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) assert index_db_project_table.all() == [ {"name": project, "versions": [{"name": versions[0], "tags": [tags[0]]}, {"name": versions[1], "tags": [tags[1]]}]} ] - remove_version_from_version_index(index_db_path, project, versions[1]) + remove_version_from_version_index(docat.DOCAT_INDEX_PATH, project, versions[1]) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": versions[0], "tags": [tags[0]]}]}] -def test_remove_version_from_version_index_remove_last_version( - client_with_claimed_project, upload_folder_path, index_db_path, index_db_project_table -): +def test_remove_version_from_version_index_remove_last_version(client_with_claimed_project, index_db_project_table): """ Tests wether remove_version_from_version_index removes the whole project from the database if the last version is removed. @@ -169,13 +167,13 @@ def test_remove_version_from_version_index_remove_last_version( version = "1.0.0" tag = "latest" - insert_version_into_version_index(index_db_path, project, version, [tag]) - remove_version_from_version_index(index_db_path, project, version) + insert_version_into_version_index(docat.DOCAT_INDEX_PATH, project, version, [tag]) + remove_version_from_version_index(docat.DOCAT_INDEX_PATH, project, version) assert index_db_project_table.all() == [] -def test_update_version_index_for_project(client_with_claimed_project, upload_folder_path, index_db_path, index_db_project_table): +def test_update_version_index_for_project(client_with_claimed_project, index_db_project_table): """ Tests wether update_version_index_for_project correctly handles inserting and deleting versions. 
@@ -184,27 +182,27 @@ def test_update_version_index_for_project(client_with_claimed_project, upload_fo project = "some-project" versions = ["1.0.0", "2.0.0"] - upload_folder = Path(upload_folder_path) + project_folder = docat.DOCAT_UPLOAD_FOLDER / project # we need to create the project folders manually, # since the api already updates the index for version in versions: - (upload_folder / project / version).mkdir(parents=True) + (project_folder / version).mkdir(parents=True) - with open(upload_folder / project / version / "index.html", "w") as f: + with open(project_folder / version / "index.html", "w") as f: f.write("

<h1>Hello World</h1>
") - update_version_index_for_project(upload_folder_path, index_db_path, project) + update_version_index_for_project(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project) assert index_db_project_table.all() == [ {"name": project, "versions": [{"name": versions[1], "tags": []}, {"name": versions[0], "tags": []}]} ] - shutil.rmtree(upload_folder / project / versions[0]) - update_version_index_for_project(upload_folder_path, index_db_path, project) + shutil.rmtree(project_folder / versions[0]) + update_version_index_for_project(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project) assert index_db_project_table.all() == [{"name": project, "versions": [{"name": versions[1], "tags": []}]}] -def test_update_file_index_for_project_version(client_with_claimed_project, upload_folder_path, index_db_path, index_db_files_table): +def test_update_file_index_for_project_version(client_with_claimed_project, index_db_files_table): """ Tests wether update_file_index_for_project_version correctly handles inserting and deleting files. @@ -214,30 +212,28 @@ def test_update_file_index_for_project_version(client_with_claimed_project, uplo version = "1.0.0" files = ["index.html", "style.css"] - upload_folder = Path(upload_folder_path) - # we need to create the project folders manually, # since the api already updates the index - (upload_folder / project / version).mkdir(parents=True) + (docat.DOCAT_UPLOAD_FOLDER / project / version).mkdir(parents=True) for file in files: - with open(upload_folder / project / version / file, "w") as f: + with open(docat.DOCAT_UPLOAD_FOLDER / project / version / file, "w") as f: f.write("

<h1>Hello World</h1>
") - update_file_index_for_project_version(upload_folder_path, index_db_path, project, version) + update_file_index_for_project_version(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project, version) assert index_db_files_table.all().sort(key=lambda e: e.get("path")) == [ {"path": files[1], "content": "", "project": project, "version": version}, {"path": files[0], "content": "hello world", "project": project, "version": version}, ].sort(key=lambda e: e["path"]) - os.remove(upload_folder / project / version / files[0]) - update_file_index_for_project_version(upload_folder_path, index_db_path, project, version) + os.remove(docat.DOCAT_UPLOAD_FOLDER / project / version / files[0]) + update_file_index_for_project_version(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project, version) assert index_db_files_table.all() == [ {"path": files[1], "content": "", "project": project, "version": version}, ] -def test_update_file_index_for_project_version_folder_does_not_exist(client_with_claimed_project, upload_folder_path, index_db_path): +def test_update_file_index_for_project_version_folder_does_not_exist(client_with_claimed_project): """ Tests wether the function just returns when the folder for the given project / version does not exist. @@ -246,11 +242,11 @@ def test_update_file_index_for_project_version_folder_does_not_exist(client_with project = "non-existing-project" with patch("docat.utils.TinyDB") as mock_tinydb: - update_file_index_for_project_version(upload_folder_path, index_db_path, project, "1.0.0") + update_file_index_for_project_version(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project, "1.0.0") mock_tinydb.assert_not_called() -def test_update_file_index_for_project(client_with_claimed_project, upload_folder_path, index_db_path, index_db_files_table): +def test_update_file_index_for_project(client_with_claimed_project, index_db_files_table): """ Tests wether update_file_index_for_project correctly handles inserting and deleting versions. @@ -259,28 +255,26 @@ def test_update_file_index_for_project(client_with_claimed_project, upload_folde project = "some-project" versions = ["1.0.0", "2.0.0"] - upload_folder = Path(upload_folder_path) - # we need to create the project folders manually, # since the api already updates the index for version in versions: - (upload_folder / project / version).mkdir(parents=True) + (docat.DOCAT_UPLOAD_FOLDER / project / version).mkdir(parents=True) - with open(upload_folder / project / version / "index.html", "w") as f: + with open(docat.DOCAT_UPLOAD_FOLDER / project / version / "index.html", "w") as f: f.write("

<h1>Hello World</h1>
") - update_file_index_for_project(upload_folder_path, index_db_path, project) + update_file_index_for_project(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project) assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ {"path": "index.html", "content": "hello world", "project": project, "version": versions[1]}, {"path": "index.html", "content": "hello world", "project": project, "version": versions[0]}, ].sort(key=lambda e: e["version"]) - shutil.rmtree(upload_folder / project / versions[0]) - update_file_index_for_project(upload_folder_path, index_db_path, project) + shutil.rmtree(docat.DOCAT_UPLOAD_FOLDER / project / versions[0]) + update_file_index_for_project(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project) assert index_db_files_table.all() == [{"path": "index.html", "content": "hello world", "project": project, "version": versions[1]}] -def test_index_project_with_html_content(client_with_claimed_project, upload_folder_path, index_db_path): +def test_index_project_with_html_content(client_with_claimed_project): """ Tests wether the function creates an index for a given project as expected. """ @@ -297,10 +291,10 @@ def test_index_project_with_html_content(client_with_claimed_project, upload_fol assert create_project_response.status_code == 201 with patch("docat.utils.insert_file_index_into_db") as mock_insert_file_index_into_db: - update_file_index_for_project_version(upload_folder_path, index_db_path, project, version) + update_file_index_for_project_version(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project, version) mock_insert_file_index_into_db.assert_called_once_with( - index_db_path, + docat.DOCAT_INDEX_PATH, project, version, file, @@ -308,7 +302,7 @@ def test_index_project_with_html_content(client_with_claimed_project, upload_fol ) -def test_index_project_non_html(client_with_claimed_project, upload_folder_path, index_db_path): +def test_index_project_non_html(client_with_claimed_project): """ Tests wether the function ignores the content of non-html files as expected. """ @@ -325,9 +319,9 @@ def test_index_project_non_html(client_with_claimed_project, upload_folder_path, assert create_project_response.status_code == 201 with patch("docat.utils.insert_file_index_into_db") as mock_insert_file_index_into_db: - update_file_index_for_project_version(upload_folder_path, index_db_path, project, version) + update_file_index_for_project_version(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH, project, version) mock_insert_file_index_into_db.assert_called_once_with( - index_db_path, + docat.DOCAT_INDEX_PATH, project, version, file, @@ -335,7 +329,7 @@ def test_index_project_non_html(client_with_claimed_project, upload_folder_path, ) -def test_index_all_projects_creates_version_and_tag_index(client_with_claimed_project, upload_folder_path, index_db_path): +def test_index_all_projects_creates_version_and_tag_index(client_with_claimed_project): """ Tests wether index_all_projects finds all versions and creates the index accordingly. 
""" @@ -357,12 +351,12 @@ def test_index_all_projects_creates_version_and_tag_index(client_with_claimed_pr assert tag_project_response.status_code == 201 with patch("docat.utils.insert_version_into_version_index") as mock_insert_version_into_version_index: - index_all_projects(upload_folder_path, index_db_path) - mock_insert_version_into_version_index.assert_any_call(index_db_path, project, versions[0], [tags[0]]) - mock_insert_version_into_version_index.assert_any_call(index_db_path, project, versions[1], [tags[1]]) + index_all_projects(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH) + mock_insert_version_into_version_index.assert_any_call(docat.DOCAT_INDEX_PATH, project, versions[0], [tags[0]]) + mock_insert_version_into_version_index.assert_any_call(docat.DOCAT_INDEX_PATH, project, versions[1], [tags[1]]) -def test_index_all_projects_creates_file_and_version_index(client_with_claimed_project, upload_folder_path, index_db_path): +def test_index_all_projects_creates_file_and_version_index(client_with_claimed_project): """ Tests wether index_all_projects finds all projects and versions and creates the index accordingly. """ @@ -381,14 +375,14 @@ def test_index_all_projects_creates_file_and_version_index(client_with_claimed_p with patch("docat.utils.insert_version_into_version_index") as mock_insert_version_into_version_index, patch( "docat.utils.insert_file_index_into_db" ) as mock_insert_file_index_into_db: - index_all_projects(upload_folder_path, index_db_path) + index_all_projects(docat.DOCAT_UPLOAD_FOLDER, docat.DOCAT_INDEX_PATH) for project in projects: for version in versions: - mock_insert_version_into_version_index.assert_any_call(index_db_path, project, version, []) - mock_insert_file_index_into_db.assert_any_call(index_db_path, project, version, "index.html", "hello world") + mock_insert_version_into_version_index.assert_any_call(docat.DOCAT_INDEX_PATH, project, version, []) + mock_insert_file_index_into_db.assert_any_call(docat.DOCAT_INDEX_PATH, project, version, "index.html", "hello world") -def test_index_all_projects_creates_file_and_version_index_api(client_with_claimed_project, index_db_path): +def test_index_all_projects_creates_file_and_version_index_api(client_with_claimed_project): """ Tests via the API wether index_all_projects finds all projects and versions and creates the index accordingly. """ @@ -412,5 +406,155 @@ def test_index_all_projects_creates_file_and_version_index_api(client_with_claim for project in projects: for version in versions: - mock_insert_version_into_version_index.assert_any_call(index_db_path, project, version, []) - mock_insert_file_index_into_db.assert_any_call(index_db_path, project, version, "index.html", "hello world") + mock_insert_version_into_version_index.assert_any_call(docat.DOCAT_INDEX_PATH, project, version, []) + mock_insert_file_index_into_db.assert_any_call(docat.DOCAT_INDEX_PATH, project, version, "index.html", "hello world") + + +def test_hide_show_removes_file_index_and_adds_again_only_version(client_with_claimed_project, index_db_files_table): + """ + Tests that the hide function removes the files of the version from the index and that + show adds it again with only one version. + """ + project = "some-project" + version = "1.0.0" + + # create a project with a version + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + # make sure we have the files in the index + assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ + {"path": "index.html", "content": "hello world", "project": project, "version": version}, + ].sort(key=lambda e: e["version"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure the files are gone from the index + assert index_db_files_table.all() == [] + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure it's back + assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ + {"path": "index.html", "content": "hello world", "project": project, "version": version}, + ].sort(key=lambda e: e["version"]) + + +def test_hide_show_removes_file_index_and_adds_again(client_with_claimed_project, index_db_files_table): + """ + Tests that the hide function removes the files of the version from the index and that + show adds it again. + """ + project = "some-project" + versions = ["1.0.0", "2.0.0"] + + for version in versions: + # create a project with a version + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + # make sure we have the files in the index + assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ + {"path": "index.html", "content": "hello world", "project": project, "version": version[0]}, + {"path": "index.html", "content": "hello world", "project": project, "version": version[1]}, + ].sort(key=lambda e: e["version"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{versions[0]}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure the files are gone from the index + assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ + {"path": "index.html", "content": "hello world", "project": project, "version": version[1]}, + ].sort(key=lambda e: e["version"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{versions[0]}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure they're back + assert index_db_files_table.all().sort(key=lambda e: e.get("version")) == [ + {"path": "index.html", "content": "hello world", "project": project, "version": version[0]}, + {"path": "index.html", "content": "hello world", "project": project, "version": version[1]}, + ].sort(key=lambda e: e["version"]) + + +def test_hide_show_removes_project_index_and_adds_again_on_hide_and_show_of_only_version( + client_with_claimed_project, index_db_project_table +): + """ + Tests that the hide function removes the version and project + from the index if the only version gets hidden and that show adds it again. + """ + project = "some-project" + version = "1.0.0" + + # create a project with a version + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + # make sure we have the version in the index + assert index_db_project_table.all().sort(key=lambda e: e.get("name")) == [ + {"name": project, "versions": [version]}, + ].sort(key=lambda e: e["name"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure the version and project is gone from the index + assert index_db_project_table.all() == [] + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure it's back + assert index_db_project_table.all().sort(key=lambda e: e.get("name")) == [ + {"name": project, "versions": [version]}, + ].sort(key=lambda e: e["name"]) + + +def test_hide_show_removes_version_from_index(client_with_claimed_project, index_db_project_table): + """ + Tests that the hide function removes the version + from the index if it gets hidden and that show adds it again. + """ + project = "some-project" + versions = ["1.0.0", "2.0.0"] + + for version in versions: + # create a project with a version + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + # make sure we have the version in the index + assert index_db_project_table.all().sort(key=lambda e: e.get("name")) == [ + {"name": project, "versions": [{"name": v, "tags": []} for v in versions]}, + ].sort(key=lambda e: e["name"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{versions[0]}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure the version is gone from the index + assert index_db_project_table.all().sort(key=lambda e: e.get("name")) == [ + {"name": project, "versions": [{"name": versions[1], "tags": []}]}, + ].sort(key=lambda e: e["name"]) + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{versions[0]}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + # make sure it's back + assert index_db_project_table.all().sort(key=lambda e: e.get("name")) == [ + {"name": project, "versions": [{"name": v, "tags": []} for v in versions]}, + ].sort(key=lambda e: e["name"]) diff --git a/docat/tests/test_search.py b/docat/tests/test_search.py index dbed11799..811b3a417 100644 --- a/docat/tests/test_search.py +++ b/docat/tests/test_search.py @@ -1,7 +1,5 @@ import io -import docat - def test_search_finds_project_by_name(client_with_claimed_project): """ @@ -247,11 +245,10 @@ def test_index_updated_on_rename(client_with_claimed_project): } -def test_search_updated_on_delete(client_with_claimed_project, upload_folder_path): +def test_search_updated_on_delete(client_with_claimed_project): """ The version and it's files should be removed from the index when deleted """ - docat.utils.UPLOAD_FOLDER = upload_folder_path project = "some-project" version = "1.0.0" version_to_delete = "1.0.1" @@ -300,3 +297,360 @@ def test_search_updated_on_delete(client_with_claimed_project, upload_folder_pat "versions": [], "files": [], } + + +def test_search_finds_files_by_name(client_with_claimed_project): + """ + The search should find files by name. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=index") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [], + "versions": [], + "files": [{"project": project, "version": version, "path": "index.html"}], + } + + +def test_search_finds_files_by_content_html(client_with_claimed_project): + """ + The search should find html files by content. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=hello%20world") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [], + "versions": [], + "files": [{"project": project, "version": version, "path": "index.html"}], + } + + +def test_search_ignores_content_for_non_html_files(client_with_claimed_project): + """ + The search should not find content of non-html files. + (Should be impossible anyways because indexing should already ignore the content.) + """ + + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.txt", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=hello%20world") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [], + "versions": [], + "files": [], + } + + +def test_search_ignores_files_of_hidden_versions_by_name(client_with_claimed_project): + """ + After a version was hidden, it's files should not be found by name anymore. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.txt", io.BytesIO(b"Lorem ipsum dolor sit..."), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=index") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [], + "versions": [], + "files": [{"project": project, "version": version, "path": "index.txt"}], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=index") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + +def test_search_ignores_files_of_hidden_versions_by_content(client_with_claimed_project): + """ + After a version was hidden, it's files should not be found by html content anymore. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=hello%20world") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [], + "versions": [], + "files": [{"project": project, "version": version, "path": "index.html"}], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=hello%20world") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + +def test_search_ignores_project_with_only_hidden_versions(client_with_claimed_project): + """ + The project should not be found when all it's versions are hidden. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=some-project") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [{"name": project}], + "versions": [], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=some-project") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + +def test_search_finds_project_with_only_hidden_versions_after_showing(client_with_claimed_project): + """ + The project should be found again when all it's versions are hidden and then shown again. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=some-project") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [{"name": project}], + "versions": [], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=some-project") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_1 = client_with_claimed_project.get("/api/search?query=some-project") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [{"name": project}], + "versions": [], + "files": [], + } + + +def test_search_ignores_hidden_versions(client_with_claimed_project): + """ + The version should not be found when it's hidden. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=1.0") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [], + "versions": [{"project": project, "version": version}], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=1.0.0") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + +def test_search_finds_shown_versions_after_hide(client_with_claimed_project): + """ + The version should be found again after it's hidden and shown again. + """ + project = "some-project" + version = "1.0.0" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": ("index.html", io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response_1 = client_with_claimed_project.get("/api/search?query=1.0") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [], + "versions": [{"project": project, "version": version}], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/hide", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_2 = client_with_claimed_project.get("/api/search?query=1.0") + assert search_response_2.status_code == 200 + assert search_response_2.json() == { + "projects": [], + "versions": [], + "files": [], + } + + hide_version_response = client_with_claimed_project.post(f"/api/{project}/{version}/show", headers={"Docat-Api-Key": "1234"}) + assert hide_version_response.status_code == 200 + + search_response_1 = client_with_claimed_project.get("/api/search?query=1.0") + assert search_response_1.status_code == 200 + assert search_response_1.json() == { + "projects": [], + "versions": [{"project": project, "version": version}], + "files": [], + } + + +def test_search_project_version_and_file_match(client_with_claimed_project): + """ + Test that the search finds the project, the version and a file with a matching name at the same time. + """ + project = "some-project" + version = "some-version" + file = "some-file.html" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": (file, io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=some") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [{"name": project}], + "versions": [{"project": project, "version": version}], + "files": [{"project": project, "version": version, "path": file}], + } + + +def test_search_project_version_content_match(client_with_claimed_project): + """ + Test that the search finds the project, the version and the file with matching content at the same time. + """ + project = "some-project" + version = "some-version" + file = "index.html" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": (file, io.BytesIO(b"

<h1>some content</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=some") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [{"name": project}], + "versions": [{"project": project, "version": version}], + "files": [{"project": project, "version": version, "path": file}], + } + + +def test_search_file_and_content_match_no_duplicates(client_with_claimed_project): + """ + Test that the search only returns the file once when the file name and the content match. + """ + project = "some-project" + version = "1.0.0" + file = "hello-world.html" + + create_project_response = client_with_claimed_project.post( + f"/api/{project}/{version}", + files={"file": (file, io.BytesIO(b"

<h1>Hello World</h1>
"), "plain/text")}, + ) + assert create_project_response.status_code == 201 + + search_response = client_with_claimed_project.get("/api/search?query=hello") + assert search_response.status_code == 200 + assert search_response.json() == { + "projects": [], + "versions": [], + "files": [{"project": project, "version": version, "path": file}], + } diff --git a/docat/tests/test_setup.py b/docat/tests/test_setup.py deleted file mode 100644 index 671acc5c6..000000000 --- a/docat/tests/test_setup.py +++ /dev/null @@ -1,23 +0,0 @@ -# import io -# import os -# import tempfile -# from pathlib import Path -# from unittest.mock import patch - -# import docat.utils as utils - -# def test_creates_missing_db_folder_and_files(): -# temp_dir = tempfile.TemporaryDirectory() -# db_dir = os.path.join(temp_dir.name, "db") -# utils.UPLOAD_FOLDER = Path(temp_dir.name) - -# with patch("os.makedirs") as makedirs_mock: -# import docat.app as docat - -# docat.update_index() - -# assert docat.DOCAT_DB_DIR == db_dir -# assert docat.DOCAT_DB_PATH == os.path.join(db_dir, "db.json") -# assert docat.DOCAT_INDEX_PATH == os.path.join(db_dir, "index.json") - -# temp_dir.cleanup() diff --git a/docat/tests/test_upload.py b/docat/tests/test_upload.py index 86fb200ff..1d5746693 100644 --- a/docat/tests/test_upload.py +++ b/docat/tests/test_upload.py @@ -30,7 +30,7 @@ def test_successfully_override(client_with_claimed_project): assert response.status_code == 201 assert response_data["message"] == "File successfully uploaded" - assert remove_mock.mock_calls == [call("some-project", "1.0.0")] + assert remove_mock.mock_calls == [call("some-project", "1.0.0", docat.DOCAT_UPLOAD_FOLDER)] def test_tags_are_not_overwritten_without_api_key(client_with_claimed_project): diff --git a/docat/tests/test_utils.py b/docat/tests/test_utils.py index 2ae3bd2b1..959ad2e92 100644 --- a/docat/tests/test_utils.py +++ b/docat/tests/test_utils.py @@ -1,6 +1,7 @@ from pathlib import Path from unittest.mock import MagicMock, patch +import docat.app as docat from docat.utils import create_symlink, extract_archive, remove_docs @@ -70,11 +71,10 @@ def test_archive_artifact(): def test_remove_version(temp_project_version): docs = temp_project_version("project", "1.0") - with patch("docat.utils.UPLOAD_FOLDER", docs): - remove_docs("project", "1.0") + remove_docs("project", "1.0", docat.DOCAT_UPLOAD_FOLDER) - assert docs.exists() - assert not (docs / "project").exists() + assert docs.exists() + assert not (docs / "project").exists() def test_remove_symlink_version(temp_project_version): @@ -83,7 +83,6 @@ def test_remove_symlink_version(temp_project_version): symlink_to_latest = docs / project / "latest" assert symlink_to_latest.is_symlink() - with patch("docat.utils.UPLOAD_FOLDER", docs): - remove_docs(project, "latest") + remove_docs(project, "latest", docat.DOCAT_UPLOAD_FOLDER) - assert not symlink_to_latest.exists() + assert not symlink_to_latest.exists()