🐛 fix(bdd): revert PR #26 schema isolation, add cache flush + sequential test execution
All checks were successful
CI/CD Pipeline / Build Docker Cache (push) Successful in 12s
CI/CD Pipeline / CI Pipeline (push) Successful in 8m44s
CI/CD Pipeline / Trigger Docker Push (push) Has been skipped

PR #26 added BDD_SCHEMA_ISOLATION=true to CI, but this creates per-scenario schemas
WITHOUT running migrations on them, causing 500 errors on user registration. This
PR reverts that change and instead relies on:

1. The existing CleanupDatabase hook (truncates all tables AfterScenario)
2. Sequential test package execution (-p 1) to avoid contention between feature
   packages sharing the same Postgres DB

Plus defensive additions for future-proofing:
- pkg/server/server.go: GetCacheService() exposed for test cleanup
- pkg/bdd/testserver/server.go: cacheService field + FlushCache() method
- pkg/bdd/testserver/state_tracer.go: TraceStateCacheOperation
- pkg/bdd/suite.go: AfterScenario hook calls FlushCache()
- scripts/run-bdd-tests.sh: -p 1 added (sequential package execution)

Validation:
- AuthBDD alone: 5/5 PASS (was 0/5 with broken schema isolation)
- Full features/ via run-bdd-tests.sh: ALL PASS (auth, config, greet, health, jwt)

Out of scope (follow-up T12):
- Proper parallel BDD with schema migrations per scenario + dedicated connection
  pools. Required for scaling tests but architecturally significant. Tracked.

Co-Authored-By: Mistral Vibe (devstral-2 / mistral-medium-3.5) - cache flush diagnosis
Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com> - root cause + revert
This commit is contained in:
2026-05-03 16:28:21 +02:00
parent 11fefe3bd9
commit 63d27cc35e
6 changed files with 50 additions and 6 deletions

View File

@@ -15,6 +15,7 @@ import (
"sync"
"time"
"dance-lessons-coach/pkg/cache"
"dance-lessons-coach/pkg/config"
"dance-lessons-coach/pkg/server"
"dance-lessons-coach/pkg/user"
@@ -48,6 +49,7 @@ type Server struct {
baseURL string
db *sql.DB
authService user.AuthService // Reference to auth service for cleanup
cacheService cache.Service // Reference to cache service for cleanup
schemaMutex sync.Mutex // Protects schema operations
currentSchema string // Current schema being used
originalSearchPath string // Original search_path to restore
@@ -153,6 +155,9 @@ func (s *Server) Start() error {
// Store auth service for cleanup
s.authService = realServer.GetAuthService()
// Store cache service for cleanup
s.cacheService = realServer.GetCacheService()
// Initialize database connection for cleanup
if err := s.initDBConnection(); err != nil {
return fmt.Errorf("failed to initialize database connection: %w", err)
@@ -409,6 +414,23 @@ func (s *Server) ResetJWTSecrets() error {
return nil
}
// FlushCache clears all cached data so state from one BDD scenario cannot
// leak into the next via cached responses.
//
// It is a no-op (returning nil) when the server was started without a cache
// service; it never returns an error in the current implementation, but the
// error return keeps the signature consistent with the other cleanup hooks.
func (s *Server) FlushCache() error {
	if s.cacheService != nil {
		s.cacheService.Flush()
		if isCleanupLoggingEnabled() {
			log.Info().Msg("CLEANUP: Cache flushed successfully")
		}
		return nil
	}
	// No cache backend configured — nothing to flush.
	if isCleanupLoggingEnabled() {
		log.Info().Msg("CLEANUP: No cache service available, skipping cache flush")
	}
	return nil
}
// CleanupDatabase deletes all test data from all tables
// This uses raw SQL to avoid dependency on repositories and handles foreign keys properly
// Uses SET CONSTRAINTS ALL DEFERRED to temporarily disable foreign key checks
@@ -555,7 +577,7 @@ func (s *Server) SetupScenarioSchema(feature, scenario string) error {
return fmt.Errorf("failed to create schema %s: %w", schemaName, err)
}
// Set search path to use the new schema
// Set search path to use the new schema (testserver's own connection)
searchPathSQL := fmt.Sprintf("SET search_path = %s, %s", schemaName, s.originalSearchPath)
if _, err := s.db.Exec(searchPathSQL); err != nil {
return fmt.Errorf("failed to set search_path: %w", err)