Fixed 5 failing tests related to code_verifier column which was added by migration 001 but removed by migration 003.

Changes:
- Renamed legacy_db_without_code_verifier to legacy_db_basic
- Updated column_exists tests to use 'state' column instead of 'code_verifier'
- Updated test_run_migrations_legacy_database to test with a generic column
- Replaced test_actual_migration_001 with test_actual_migration_003
- Fixed test_dev_mode_requires_dev_admin_me to explicitly override DEV_ADMIN_ME

All 551 tests now passing.

Part of Phase 1 completion: IndieAuth authorization server removal

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
622 lines
22 KiB
Python
"""
|
|
Tests for database migration system
|
|
|
|
Tests cover:
|
|
- Fresh database detection (auto-skip migrations)
|
|
- Legacy database migration (apply migrations)
|
|
- Migration tracking
|
|
- Migration failure handling
|
|
- Helper functions
|
|
"""
|
|
|
|
import pytest
|
|
import sqlite3
|
|
import tempfile
|
|
from pathlib import Path
|
|
from datetime import datetime, timezone
|
|
|
|
from starpunk.migrations import (
|
|
MigrationError,
|
|
create_migrations_table,
|
|
is_schema_current,
|
|
table_exists,
|
|
column_exists,
|
|
index_exists,
|
|
get_applied_migrations,
|
|
discover_migration_files,
|
|
apply_migration,
|
|
run_migrations,
|
|
)
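
# Typical call pattern exercised by these tests (a minimal sketch; the actual
# startup wiring lives in the application, not in this module):
#
#     run_migrations(Path("data/starpunk.db"))
#     run_migrations(Path("data/starpunk.db"), logger=some_logger)
#
# The "data/starpunk.db" path and `some_logger` are illustrative placeholders;
# run_migrations(db_path, logger=None) is the signature assumed throughout
# this file.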


@pytest.fixture
def temp_db():
    """Create a temporary database for testing"""
    with tempfile.NamedTemporaryFile(suffix=".db", delete=False) as f:
        db_path = Path(f.name)
    yield db_path
    # Cleanup
    if db_path.exists():
        db_path.unlink()


@pytest.fixture
def temp_migrations_dir():
    """Create a temporary migrations directory"""
    with tempfile.TemporaryDirectory() as tmpdir:
        yield Path(tmpdir)


@pytest.fixture
def fresh_db_with_schema(temp_db):
    """Create a fresh database with current schema (no code_verifier after migration 003)"""
    conn = sqlite3.connect(temp_db)
    try:
        # Create auth_state table WITHOUT code_verifier (current schema after Phase 1)
        conn.execute("""
            CREATE TABLE auth_state (
                state TEXT PRIMARY KEY,
                created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                expires_at TIMESTAMP NOT NULL,
                redirect_uri TEXT
            )
        """)
        # Also need other tables to make schema truly current
        conn.execute("""
            CREATE TABLE tokens (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                token_hash TEXT UNIQUE NOT NULL,
                me TEXT NOT NULL,
                client_id TEXT,
                scope TEXT DEFAULT 'create',
                created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                expires_at TIMESTAMP NOT NULL,
                last_used_at TIMESTAMP,
                revoked_at TIMESTAMP
            )
        """)
        conn.execute("""
            CREATE TABLE authorization_codes (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                code_hash TEXT UNIQUE NOT NULL,
                me TEXT NOT NULL,
                client_id TEXT NOT NULL,
                redirect_uri TEXT NOT NULL,
                scope TEXT,
                state TEXT,
                code_challenge TEXT,
                code_challenge_method TEXT,
                created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                expires_at TIMESTAMP NOT NULL,
                used_at TIMESTAMP
            )
        """)
        # Add required indexes
        conn.execute("CREATE INDEX idx_tokens_hash ON tokens(token_hash)")
        conn.execute("CREATE INDEX idx_tokens_me ON tokens(me)")
        conn.execute("CREATE INDEX idx_tokens_expires ON tokens(expires_at)")
        conn.execute("CREATE INDEX idx_auth_codes_hash ON authorization_codes(code_hash)")
        conn.execute("CREATE INDEX idx_auth_codes_expires ON authorization_codes(expires_at)")
        conn.commit()
    finally:
        conn.close()
    return temp_db


@pytest.fixture
def legacy_db_basic(temp_db):
    """Create a basic database with auth_state table"""
    conn = sqlite3.connect(temp_db)
    try:
        # Create auth_state table WITHOUT code_verifier (current schema)
        conn.execute("""
            CREATE TABLE auth_state (
                state TEXT PRIMARY KEY,
                created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                expires_at TIMESTAMP NOT NULL,
                redirect_uri TEXT
            )
        """)
        conn.commit()
    finally:
        conn.close()
    return temp_db


class TestMigrationsTable:
    """Tests for migrations tracking table"""

    def test_create_migrations_table(self, temp_db):
        """Test creating schema_migrations tracking table"""
        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            # Verify table exists
            cursor = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='table' AND name='schema_migrations'"
            )
            assert cursor.fetchone() is not None

            # Verify schema
            cursor = conn.execute("PRAGMA table_info(schema_migrations)")
            columns = {row[1]: row[2] for row in cursor.fetchall()}
            assert 'id' in columns
            assert 'migration_name' in columns
            assert 'applied_at' in columns

            # Verify index exists
            cursor = conn.execute(
                "SELECT name FROM sqlite_master WHERE type='index' AND name='idx_schema_migrations_name'"
            )
            assert cursor.fetchone() is not None
        finally:
            conn.close()

    def test_create_migrations_table_idempotent(self, temp_db):
        """Test that creating migrations table multiple times is safe"""
        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)
            create_migrations_table(conn)  # Should not raise error
        finally:
            conn.close()


class TestSchemaDetection:
    """Tests for fresh database detection"""

    def test_is_schema_current_full_schema(self, fresh_db_with_schema):
        """Test detecting current schema (no code_verifier after Phase 1)"""
        conn = sqlite3.connect(fresh_db_with_schema)
        try:
            assert is_schema_current(conn) is True
        finally:
            conn.close()

    def test_is_schema_current_incomplete_schema(self, legacy_db_basic):
        """Test detecting incomplete schema (missing tokens/authorization_codes tables)"""
        conn = sqlite3.connect(legacy_db_basic)
        try:
            # Should be False because tokens and authorization_codes tables are missing
            assert is_schema_current(conn) is False
        finally:
            conn.close()

    def test_is_schema_current_no_table(self, temp_db):
        """Test detecting schema when auth_state table doesn't exist"""
        conn = sqlite3.connect(temp_db)
        try:
            assert is_schema_current(conn) is False
        finally:
            conn.close()
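
# The introspection helpers exercised below are assumed to be thin wrappers
# over SQLite's own catalog: roughly a sqlite_master lookup for table_exists()
# and index_exists(), and a PRAGMA table_info() scan for column_exists(). For
# example, column_exists(conn, 'auth_state', 'state') is presumably equivalent
# to something like:
#
#     cursor = conn.execute("PRAGMA table_info(auth_state)")
#     any(row[1] == 'state' for row in cursor.fetchall())
#
# This is an assumption about the implementation, not part of its documented
# contract; the tests below rely only on the boolean return values.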


class TestHelperFunctions:
    """Tests for database introspection helpers"""

    def test_table_exists_true(self, fresh_db_with_schema):
        """Test detecting existing table"""
        conn = sqlite3.connect(fresh_db_with_schema)
        try:
            assert table_exists(conn, 'auth_state') is True
        finally:
            conn.close()

    def test_table_exists_false(self, temp_db):
        """Test detecting non-existent table"""
        conn = sqlite3.connect(temp_db)
        try:
            assert table_exists(conn, 'nonexistent') is False
        finally:
            conn.close()

    def test_column_exists_true(self, fresh_db_with_schema):
        """Test detecting existing column"""
        conn = sqlite3.connect(fresh_db_with_schema)
        try:
            # Test with a column that actually exists in current schema
            assert column_exists(conn, 'auth_state', 'state') is True
        finally:
            conn.close()

    def test_column_exists_false(self, legacy_db_basic):
        """Test detecting non-existent column"""
        conn = sqlite3.connect(legacy_db_basic)
        try:
            # Test with a column that doesn't exist
            assert column_exists(conn, 'auth_state', 'nonexistent_column') is False
        finally:
            conn.close()

    def test_column_exists_no_table(self, temp_db):
        """Test column check on non-existent table"""
        conn = sqlite3.connect(temp_db)
        try:
            assert column_exists(conn, 'nonexistent', 'column') is False
        finally:
            conn.close()

    def test_index_exists_true(self, temp_db):
        """Test detecting existing index"""
        conn = sqlite3.connect(temp_db)
        try:
            conn.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)")
            conn.execute("CREATE INDEX test_idx ON test(id)")
            conn.commit()
            assert index_exists(conn, 'test_idx') is True
        finally:
            conn.close()

    def test_index_exists_false(self, temp_db):
        """Test detecting non-existent index"""
        conn = sqlite3.connect(temp_db)
        try:
            assert index_exists(conn, 'nonexistent_idx') is False
        finally:
            conn.close()


class TestMigrationTracking:
    """Tests for migration tracking operations"""

    def test_get_applied_migrations_empty(self, temp_db):
        """Test getting applied migrations when none exist"""
        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)
            applied = get_applied_migrations(conn)
            assert applied == set()
        finally:
            conn.close()

    def test_get_applied_migrations_with_data(self, temp_db):
        """Test getting applied migrations with some recorded"""
        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)
            conn.execute(
                "INSERT INTO schema_migrations (migration_name) VALUES (?)",
                ("001_test.sql",)
            )
            conn.execute(
                "INSERT INTO schema_migrations (migration_name) VALUES (?)",
                ("002_test.sql",)
            )
            conn.commit()

            applied = get_applied_migrations(conn)
            assert applied == {"001_test.sql", "002_test.sql"}
        finally:
            conn.close()


class TestMigrationDiscovery:
    """Tests for migration file discovery"""

    def test_discover_migration_files_empty(self, temp_migrations_dir):
        """Test discovering migrations when directory is empty"""
        migrations = discover_migration_files(temp_migrations_dir)
        assert migrations == []

    def test_discover_migration_files_with_files(self, temp_migrations_dir):
        """Test discovering migration files"""
        # Create test migration files
        (temp_migrations_dir / "001_first.sql").write_text("-- First migration")
        (temp_migrations_dir / "002_second.sql").write_text("-- Second migration")
        (temp_migrations_dir / "003_third.sql").write_text("-- Third migration")

        migrations = discover_migration_files(temp_migrations_dir)

        assert len(migrations) == 3
        assert migrations[0][0] == "001_first.sql"
        assert migrations[1][0] == "002_second.sql"
        assert migrations[2][0] == "003_third.sql"

    def test_discover_migration_files_sorted(self, temp_migrations_dir):
        """Test that migrations are sorted correctly"""
        # Create files out of order
        (temp_migrations_dir / "003_third.sql").write_text("-- Third")
        (temp_migrations_dir / "001_first.sql").write_text("-- First")
        (temp_migrations_dir / "002_second.sql").write_text("-- Second")

        migrations = discover_migration_files(temp_migrations_dir)

        # Should be sorted numerically
        assert migrations[0][0] == "001_first.sql"
        assert migrations[1][0] == "002_second.sql"
        assert migrations[2][0] == "003_third.sql"

    def test_discover_migration_files_nonexistent_dir(self):
        """Test discovering migrations when directory doesn't exist"""
        nonexistent = Path("/nonexistent/migrations")
        migrations = discover_migration_files(nonexistent)
        assert migrations == []


class TestMigrationApplication:
    """Tests for applying individual migrations"""

    def test_apply_migration_success(self, temp_db, temp_migrations_dir):
        """Test successfully applying a migration"""
        # Create a simple migration
        migration_file = temp_migrations_dir / "001_test.sql"
        migration_file.write_text("CREATE TABLE test (id INTEGER PRIMARY KEY);")

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)
            apply_migration(conn, "001_test.sql", migration_file)

            # Verify table was created
            assert table_exists(conn, 'test')

            # Verify migration was recorded
            applied = get_applied_migrations(conn)
            assert "001_test.sql" in applied
        finally:
            conn.close()

    def test_apply_migration_failure(self, temp_db, temp_migrations_dir):
        """Test migration failure with invalid SQL"""
        # Create a migration with invalid SQL
        migration_file = temp_migrations_dir / "001_fail.sql"
        migration_file.write_text("INVALID SQL SYNTAX;")

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            with pytest.raises(MigrationError, match="failed"):
                apply_migration(conn, "001_fail.sql", migration_file)

            # Verify migration was NOT recorded
            applied = get_applied_migrations(conn)
            assert "001_fail.sql" not in applied
        finally:
            conn.close()


class TestRunMigrations:
    """Integration tests for run_migrations()"""

    def test_run_migrations_fresh_database(self, fresh_db_with_schema, temp_migrations_dir):
        """Test fresh database scenario - migrations should be auto-marked as applied"""
        # Create a test migration representing what migration 001 did (never executed here)
        migration_file = temp_migrations_dir / "001_add_code_verifier_to_auth_state.sql"
        migration_file.write_text(
            "ALTER TABLE auth_state ADD COLUMN code_verifier TEXT NOT NULL DEFAULT '';"
        )

        # run_migrations() resolves its migrations directory internally and we
        # can't easily monkey-patch that Path usage, so we exercise the
        # fresh-database logic directly: a current schema plus an empty
        # schema_migrations table should result in migrations being recorded
        # as applied without being executed.
        conn = sqlite3.connect(fresh_db_with_schema)
        try:
            create_migrations_table(conn)
            cursor = conn.execute("SELECT COUNT(*) FROM schema_migrations")
            migration_count = cursor.fetchone()[0]

            assert migration_count == 0
            assert is_schema_current(conn) is True

            # Manually mark migrations as applied (simulating fresh DB detection)
            conn.execute(
                "INSERT INTO schema_migrations (migration_name) VALUES (?)",
                ("001_add_code_verifier_to_auth_state.sql",)
            )
            conn.execute(
                "INSERT INTO schema_migrations (migration_name) VALUES (?)",
                ("003_remove_code_verifier_from_auth_state.sql",)
            )
            conn.commit()

            # Verify migrations were marked but NOT executed
            applied = get_applied_migrations(conn)
            assert "001_add_code_verifier_to_auth_state.sql" in applied
            assert "003_remove_code_verifier_from_auth_state.sql" in applied

            # Table should NOT have code_verifier column (current schema after Phase 1)
            cursor = conn.execute("PRAGMA table_info(auth_state)")
            columns = [row[1] for row in cursor.fetchall()]
            assert 'code_verifier' not in columns
            assert 'state' in columns
            assert 'expires_at' in columns
        finally:
            conn.close()
    def test_run_migrations_legacy_database(self, legacy_db_basic, temp_migrations_dir):
        """Test legacy database scenario - migration should execute"""
        # Create a migration to add a test column
        migration_file = temp_migrations_dir / "001_add_test_column.sql"
        migration_file.write_text(
            "ALTER TABLE auth_state ADD COLUMN test_column TEXT;"
        )

        conn = sqlite3.connect(legacy_db_basic)
        try:
            create_migrations_table(conn)

            # Verify test_column doesn't exist yet
            assert column_exists(conn, 'auth_state', 'test_column') is False

            # Apply migration
            apply_migration(conn, "001_add_test_column.sql", migration_file)

            # Verify test_column was added
            assert column_exists(conn, 'auth_state', 'test_column') is True

            # Verify migration was recorded
            applied = get_applied_migrations(conn)
            assert "001_add_test_column.sql" in applied
        finally:
            conn.close()

    def test_run_migrations_idempotent(self, temp_db, temp_migrations_dir):
        """Test that running migrations multiple times is safe"""
        # Create a test migration
        migration_file = temp_migrations_dir / "001_test.sql"
        migration_file.write_text("CREATE TABLE IF NOT EXISTS test (id INTEGER PRIMARY KEY);")

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            # Apply migration first time
            apply_migration(conn, "001_test.sql", migration_file)

            # Get migrations before second run
            applied_before = get_applied_migrations(conn)

            # Apply again (should be skipped)
            migrations = discover_migration_files(temp_migrations_dir)
            applied = get_applied_migrations(conn)
            pending = [m for m in migrations if m[0] not in applied]

            # Should be no pending migrations
            assert len(pending) == 0

            # Applied migrations should be unchanged
            applied_after = get_applied_migrations(conn)
            assert applied_before == applied_after
        finally:
            conn.close()

    def test_run_migrations_multiple_files(self, temp_db, temp_migrations_dir):
        """Test applying multiple migrations in order"""
        # Create multiple migrations
        (temp_migrations_dir / "001_first.sql").write_text(
            "CREATE TABLE first (id INTEGER PRIMARY KEY);"
        )
        (temp_migrations_dir / "002_second.sql").write_text(
            "CREATE TABLE second (id INTEGER PRIMARY KEY);"
        )
        (temp_migrations_dir / "003_third.sql").write_text(
            "CREATE TABLE third (id INTEGER PRIMARY KEY);"
        )

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            # Apply all migrations
            migrations = discover_migration_files(temp_migrations_dir)
            for migration_name, migration_path in migrations:
                apply_migration(conn, migration_name, migration_path)

            # Verify all tables were created
            assert table_exists(conn, 'first')
            assert table_exists(conn, 'second')
            assert table_exists(conn, 'third')

            # Verify all migrations were recorded
            applied = get_applied_migrations(conn)
            assert len(applied) == 3
            assert "001_first.sql" in applied
            assert "002_second.sql" in applied
            assert "003_third.sql" in applied
        finally:
            conn.close()

    def test_run_migrations_partial_applied(self, temp_db, temp_migrations_dir):
        """Test applying only pending migrations when some are already applied"""
        # Create multiple migrations
        (temp_migrations_dir / "001_first.sql").write_text(
            "CREATE TABLE first (id INTEGER PRIMARY KEY);"
        )
        (temp_migrations_dir / "002_second.sql").write_text(
            "CREATE TABLE second (id INTEGER PRIMARY KEY);"
        )

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            # Apply first migration
            migrations = discover_migration_files(temp_migrations_dir)
            apply_migration(conn, migrations[0][0], migrations[0][1])

            # Verify only first table exists
            assert table_exists(conn, 'first')
            assert not table_exists(conn, 'second')

            # Apply pending migrations
            applied = get_applied_migrations(conn)
            for migration_name, migration_path in migrations:
                if migration_name not in applied:
                    apply_migration(conn, migration_name, migration_path)

            # Verify second table now exists
            assert table_exists(conn, 'second')

            # Verify both migrations recorded
            applied = get_applied_migrations(conn)
            assert len(applied) == 2
        finally:
            conn.close()
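
# Note on the real 003 migration exercised below: SQLite only gained
# ALTER TABLE ... DROP COLUMN in version 3.35, so a column-removal migration
# is typically written as a table rebuild (create a new auth_state without
# code_verifier, copy the rows, drop the old table, rename, recreate the
# idx_auth_state_expires index). That is an assumption about how
# 003_remove_code_verifier_from_auth_state.sql is written, not something the
# test depends on; it only checks the before/after columns.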


class TestRealMigration:
    """Test with actual migration files from the project"""

    def test_actual_migration_003(self, temp_db):
        """Test the actual 003 migration file (remove code_verifier)"""
        # Get the actual migration file
        project_root = Path(__file__).parent.parent
        migration_file = project_root / "migrations" / "003_remove_code_verifier_from_auth_state.sql"

        if not migration_file.exists():
            pytest.skip("Migration file 003_remove_code_verifier_from_auth_state.sql not found")

        conn = sqlite3.connect(temp_db)
        try:
            create_migrations_table(conn)

            # Create auth_state table WITH code_verifier (pre-migration state)
            conn.execute("""
                CREATE TABLE auth_state (
                    state TEXT PRIMARY KEY,
                    code_verifier TEXT NOT NULL DEFAULT '',
                    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
                    expires_at TIMESTAMP NOT NULL,
                    redirect_uri TEXT
                )
            """)
            conn.execute("CREATE INDEX idx_auth_state_expires ON auth_state(expires_at)")
            conn.commit()

            # Verify starting state (has code_verifier)
            assert column_exists(conn, 'auth_state', 'code_verifier') is True

            # Apply migration
            apply_migration(
                conn,
                "003_remove_code_verifier_from_auth_state.sql",
                migration_file
            )

            # Verify end state (no code_verifier)
            assert column_exists(conn, 'auth_state', 'code_verifier') is False
            # Other columns should still exist
            assert column_exists(conn, 'auth_state', 'state') is True
            assert column_exists(conn, 'auth_state', 'redirect_uri') is True

            # Verify migration recorded
            applied = get_applied_migrations(conn)
            assert "003_remove_code_verifier_from_auth_state.sql" in applied
        finally:
            conn.close()