Change tests to dump the first migration and use for all other tests (#236)

* add default env vars for pg client

* add postgresql-client to the docker image

* Change tests to dump the first migration and use for all other tests

* remove the defn of slow_db
diversemix authored Oct 28, 2024
1 parent 11d5882 commit 0c67391
Showing 4 changed files with 46 additions and 68 deletions.
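The heart of the change is in tests/integration_tests/conftest.py: the first test still runs the migrations, but the resulting schema is then dumped with pg_dump to a temporary file, and every later test restores that dump with psql instead of re-running the migrations. Below is a minimal sketch of that pattern, written independently of the project's fixtures; restore_or_create_schema and run_migrations_once are illustrative names only, not part of the repository.

import subprocess
import tempfile

_schema_dump = None  # module-level cache, mirroring `migration_file` in the diff below


def restore_or_create_schema(run_migrations_once) -> None:
    """Pay the migration cost once, then replay a pg_dump of the result for later tests."""
    global _schema_dump
    if _schema_dump is None:
        run_migrations_once()  # slow path: only the first test runs the real migrations
        _schema_dump = tempfile.NamedTemporaryFile(delete=False).name
        # pg_dump/psql read PGUSER, PGPASSWORD and PGHOST from the environment (see .env.example)
        subprocess.run(["pg_dump", "-f", _schema_dump], check=True)  # snapshot the migrated schema
    else:
        subprocess.run(["psql", "-f", _schema_dump], check=True)  # fast path: replay plain SQL

The real fixture in the diff additionally drops and recreates the database before each restore, so every test still starts from a clean schema.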
3 changes: 3 additions & 0 deletions .env.example
@@ -2,6 +2,9 @@
POSTGRES_USER=navigator
POSTGRES_PASSWORD=password
ADMIN_POSTGRES_HOST=admin_backend_db
PGUSER=navigator
PGPASSWORD=password
PGHOST=admin_backend_db

# API
SECRET_KEY=secret_test_key
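The three new variables are there because pg_dump and psql are libpq clients: they read PGUSER, PGPASSWORD and PGHOST from the environment, so the subprocess calls in conftest.py need no explicit connection flags. (When no database name is given, libpq connects to the database named after the user.) A small illustration of the same invocation with the environment spelled out; the values mirror .env.example and the dump path is made up for the example.

import os
import subprocess

# Equivalent of the conftest.py call, with the libpq environment made explicit.
env = {**os.environ, "PGUSER": "navigator", "PGPASSWORD": "password", "PGHOST": "admin_backend_db"}
subprocess.run(["pg_dump", "-f", "/tmp/schema.sql"], env=env, check=True)  # /tmp/schema.sql is illustrative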
6 changes: 6 additions & 0 deletions Dockerfile
@@ -3,6 +3,12 @@ FROM python:3.10-slim
WORKDIR /usr/src
ENV PYTHONPATH=/usr/src

# Install PostgreSQL client tools
RUN apt-get update && \
    apt-get install -y --no-install-recommends \
    postgresql-client && \
    rm -rf /var/lib/apt/lists/*

# Requirements
RUN pip install --no-cache-dir poetry==1.7.1
COPY poetry.lock pyproject.toml ./
6 changes: 3 additions & 3 deletions poetry.lock

Some generated files are not rendered by default.

99 changes: 34 additions & 65 deletions tests/integration_tests/conftest.py
@@ -1,6 +1,6 @@
import os
import uuid
from typing import Dict, Generator
import subprocess
import tempfile
from typing import Dict

import boto3
import pytest
@@ -9,7 +9,6 @@
from fastapi.testclient import TestClient
from moto import mock_s3
from sqlalchemy import create_engine
from sqlalchemy.engine import Connection
from sqlalchemy.orm import sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database

@@ -40,29 +39,51 @@
UNFCCC_ORG_ID = 2
SUPER_ORG_ID = 50

migration_file = None

def get_test_db_url() -> str:
    return SQLALCHEMY_DATABASE_URI + f"_test_{uuid.uuid4()}"

def _create_engine_run_migrations(test_db_url: str):
    test_engine = create_engine(test_db_url)
    run_migrations(test_engine)
    return test_engine

@pytest.fixture(scope="function")
def slow_db(monkeypatch):
    """Create a fresh test database for each test."""

    test_db_url = get_test_db_url()
def do_cached_migrations(test_db_url: str):

    global migration_file  # Note this is scoped to the module, so it will not get recreated.

    # Create the test database
    if database_exists(test_db_url):
        drop_database(test_db_url)
    create_database(test_db_url)

    test_engine = None

    if not migration_file:
        test_engine = _create_engine_run_migrations(test_db_url)
        migration_file = tempfile.NamedTemporaryFile().name
        result = subprocess.run(["pg_dump", "-f", migration_file])
        assert result.returncode == 0
    else:
        result = subprocess.run(["psql", "-f", migration_file])
        assert result.returncode == 0
        test_engine = create_engine(test_db_url)

    return test_engine


@pytest.fixture(scope="function")
def data_db(monkeypatch):
    """Create a fresh test database for each test."""

    test_db_url = SQLALCHEMY_DATABASE_URI  # Use the same db - cannot parallelize tests

    test_engine = do_cached_migrations(test_db_url)
    test_session = None
    connection = None
    try:
        test_engine = create_engine(test_db_url)
        connection = test_engine.connect()

        run_migrations(test_engine)
        connection = test_engine.connect()
        test_session_maker = sessionmaker(
            autocommit=False,
            autoflush=False,
@@ -86,58 +107,6 @@ def get_test_db():
    drop_database(test_db_url)


@pytest.fixture(scope="session")
def data_db_connection() -> Generator[Connection, None, None]:
    test_db_url = get_test_db_url()

    if database_exists(test_db_url):
        drop_database(test_db_url)
    create_database(test_db_url)

    saved_db_url = os.environ["DATABASE_URL"]
    os.environ["DATABASE_URL"] = test_db_url

    test_engine = create_engine(test_db_url)

    run_migrations(test_engine)
    connection = test_engine.connect()

    yield connection
    connection.close()

    os.environ["DATABASE_URL"] = saved_db_url
    drop_database(test_db_url)


@pytest.fixture(scope="function")
def data_db(slow_db):
    yield slow_db


# @pytest.fixture(scope="function")
# def data_db(data_db_connection, monkeypatch):

#     outer = data_db_connection.begin_nested()
#     SessionLocal = sessionmaker(
#         autocommit=False, autoflush=False, bind=data_db_connection
#     )
#     session = SessionLocal()

#     def get_test_db():
#         return session

#     monkeypatch.setattr(db_session, "get_db", get_test_db)
#     yield session
#     if not outer.is_active:
#         print("Outer transaction already completed.")
#         #raise RuntimeError("Outer transaction already completed.")
#     else:
#         outer.rollback()
#     n_cols = data_db_connection.execute("select count(*) from collection")
#     if n_cols.scalar() != 0:
#         raise RuntimeError("Database not cleaned up properly")


@pytest.fixture
def client():
    """Get a TestClient instance that reads/write to the test database."""
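With the cached dump in place, tests keep requesting the data_db fixture exactly as before; the restore happens inside the fixture. A hypothetical test follows, assuming (as the sessionmaker in the fixture suggests) that data_db yields a SQLAlchemy session bound to the restored schema.

# tests/integration_tests/test_example.py -- hypothetical module, not part of this commit
from sqlalchemy import text


def test_schema_is_available(data_db):
    # The schema was restored from the cached pg_dump, so plain SQL works immediately.
    assert data_db.execute(text("SELECT 1")).scalar() == 1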
