diff --git a/.env.example b/.env.example index bd7cfffb..c2960c72 100644 --- a/.env.example +++ b/.env.example @@ -2,6 +2,9 @@ POSTGRES_USER=navigator POSTGRES_PASSWORD=password ADMIN_POSTGRES_HOST=admin_backend_db +PGUSER=navigator +PGPASSWORD=password +PGHOST=admin_backend_db # API SECRET_KEY=secret_test_key diff --git a/Dockerfile b/Dockerfile index 4dda3c4e..3796f38f 100644 --- a/Dockerfile +++ b/Dockerfile @@ -3,6 +3,12 @@ FROM python:3.10-slim WORKDIR /usr/src ENV PYTHONPATH=/usr/src +# Install PostgreSQL client tools +RUN apt-get update && \ + apt-get install -y --no-install-recommends \ + postgresql-client && \ + rm -rf /var/lib/apt/lists/* + # Requirements RUN pip install --no-cache-dir poetry==1.7.1 COPY poetry.lock pyproject.toml ./ diff --git a/poetry.lock b/poetry.lock index 6a6c9493..ad528f7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -1790,8 +1790,8 @@ files = [ annotated-types = ">=0.6.0" pydantic-core = "2.23.4" typing-extensions = [ - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, {version = ">=4.6.1", markers = "python_version < \"3.13\""}, + {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, ] [package.extras] @@ -1931,8 +1931,8 @@ files = [ astroid = ">=3.3.4,<=3.4.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = [ - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" diff --git a/tests/integration_tests/conftest.py b/tests/integration_tests/conftest.py index 83d87cab..2b6a262c 100644 --- a/tests/integration_tests/conftest.py +++ b/tests/integration_tests/conftest.py @@ -1,6 +1,6 @@ -import os -import uuid -from typing import Dict, Generator +import subprocess +import tempfile +from typing import Dict import boto3 import pytest @@ -9,7 +9,6 @@ from fastapi.testclient import TestClient from moto import mock_s3 from sqlalchemy import create_engine -from sqlalchemy.engine import Connection from sqlalchemy.orm import sessionmaker from sqlalchemy_utils import create_database, database_exists, drop_database @@ -40,29 +39,51 @@ UNFCCC_ORG_ID = 2 SUPER_ORG_ID = 50 +migration_file = None -def get_test_db_url() -> str: - return SQLALCHEMY_DATABASE_URI + f"_test_{uuid.uuid4()}" +def _create_engine_run_migrations(test_db_url: str): + test_engine = create_engine(test_db_url) + run_migrations(test_engine) + return test_engine -@pytest.fixture(scope="function") -def slow_db(monkeypatch): - """Create a fresh test database for each test.""" - test_db_url = get_test_db_url() +def do_cached_migrations(test_db_url: str): + + global 
migration_file # Note this is scoped to the module, so it will not get recreated. # Create the test database if database_exists(test_db_url): drop_database(test_db_url) create_database(test_db_url) + test_engine = None + + if not migration_file: + test_engine = _create_engine_run_migrations(test_db_url) + migration_file = tempfile.NamedTemporaryFile().name + result = subprocess.run(["pg_dump", "-f", migration_file]) + assert result.returncode == 0 + else: + result = subprocess.run(["psql", "-f", migration_file]) + assert result.returncode == 0 + test_engine = create_engine(test_db_url) + + return test_engine + + +@pytest.fixture(scope="function") +def data_db(monkeypatch): + """Create a fresh test database for each test.""" + + test_db_url = SQLALCHEMY_DATABASE_URI # Use the same db - cannot parallelize tests + + test_engine = do_cached_migrations(test_db_url) test_session = None connection = None try: - test_engine = create_engine(test_db_url) - connection = test_engine.connect() - run_migrations(test_engine) + connection = test_engine.connect() test_session_maker = sessionmaker( autocommit=False, autoflush=False, @@ -86,58 +107,6 @@ def get_test_db(): drop_database(test_db_url) -@pytest.fixture(scope="session") -def data_db_connection() -> Generator[Connection, None, None]: - test_db_url = get_test_db_url() - - if database_exists(test_db_url): - drop_database(test_db_url) - create_database(test_db_url) - - saved_db_url = os.environ["DATABASE_URL"] - os.environ["DATABASE_URL"] = test_db_url - - test_engine = create_engine(test_db_url) - - run_migrations(test_engine) - connection = test_engine.connect() - - yield connection - connection.close() - - os.environ["DATABASE_URL"] = saved_db_url - drop_database(test_db_url) - - -@pytest.fixture(scope="function") -def data_db(slow_db): - yield slow_db - - -# @pytest.fixture(scope="function") -# def data_db(data_db_connection, monkeypatch): - -# outer = data_db_connection.begin_nested() -# SessionLocal = sessionmaker( -# 
autocommit=False, autoflush=False, bind=data_db_connection -# ) -# session = SessionLocal() - -# def get_test_db(): -# return session - -# monkeypatch.setattr(db_session, "get_db", get_test_db) -# yield session -# if not outer.is_active: -# print("Outer transaction already completed.") -# #raise RuntimeError("Outer transaction already completed.") -# else: -# outer.rollback() -# n_cols = data_db_connection.execute("select count(*) from collection") -# if n_cols.scalar() != 0: -# raise RuntimeError("Database not cleaned up properly") - - @pytest.fixture def client(): """Get a TestClient instance that reads/write to the test database."""