From 792ad0f10cf1ee0e26e1e6fbd87ac73f1ae78f77 Mon Sep 17 00:00:00 2001
From: Drew Bednar
Date: Mon, 26 Jan 2026 13:31:07 -0500
Subject: [PATCH] Preparing for integration testing

---
 Makefile                                     |   3 +-
 cmd/api/handlers_integration_test.go         |  18 +++
 cmd/api/handlers_test.go                     | 117 ++++++++++++++++-
 docker-compose.yaml                          |   2 +-
 internal/testutil/db.go                      | 128 +++++++++++++++++++
 migrations/000001_create_movies_table.up.sql |   2 +-
 6 files changed, 264 insertions(+), 6 deletions(-)
 create mode 100644 cmd/api/handlers_integration_test.go
 create mode 100644 internal/testutil/db.go

diff --git a/Makefile b/Makefile
index 5940591..808604f 100644
--- a/Makefile
+++ b/Makefile
@@ -18,9 +18,8 @@ test-short:
 	go test -short $(FLAGS) ./...
 .PHONY: test-short
 
-# make test-int ARGS=no-cache
 test-int:
-	go test $(FLAGS) ./cmd/...
+	go test $(FLAGS) -count=1 ./cmd/api/ -integration-handlers
 .PHONY: test-int
 
 ## Coverage See also -covermode=count and -covermode=atomic
diff --git a/cmd/api/handlers_integration_test.go b/cmd/api/handlers_integration_test.go
new file mode 100644
index 0000000..580ecdb
--- /dev/null
+++ b/cmd/api/handlers_integration_test.go
@@ -0,0 +1,18 @@
+package main
+
+import (
+	"flag"
+	"testing"
+)
+
+var runIntegrationTestHandlers bool
+
+func init() {
+	flag.BoolVar(&runIntegrationTestHandlers, "integration-handlers", false, "run integration tests for http handlers")
+}
+
+func TestHttpListHandlers(t *testing.T) {
+	if !runIntegrationTestHandlers {
+		t.Skip("Skipping handler integration tests")
+	}
+}
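The stub above only wires in the flag gate. As a rough sketch of where it is headed (not part of this patch), and assuming the *pgxpool.Pool returned by testutil.SetupTestDB below satisfies the database.PgxIface accepted by newTestApplication, the gated test might eventually reuse the request/assert pattern from handlers_test.go like so (import of internal/testutil elided):

func TestHttpListHandlers(t *testing.T) {
	if !runIntegrationTestHandlers {
		t.Skip("Skipping handler integration tests")
	}

	// Fresh, fully migrated database for this run; dropped automatically via t.Cleanup.
	pool := testutil.SetupTestDB(t)
	app := newTestApplication(pool)

	respRec := httptest.NewRecorder()
	r, err := http.NewRequest(http.MethodGet, "/v1/movies?genre=drama", nil)
	assert.NilError(t, err)

	app.routes().ServeHTTP(respRec, r)
	assert.Equal(t, respRec.Result().StatusCode, http.StatusOK)
}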
diff --git a/cmd/api/handlers_test.go b/cmd/api/handlers_test.go
index 66bc4b8..dca1575 100644
--- a/cmd/api/handlers_test.go
+++ b/cmd/api/handlers_test.go
@@ -20,6 +20,9 @@ import (
 	"github.com/pashagolub/pgxmock/v4"
 )
 
+var defaultFixedTime = time.Date(2026, 1, 25, 10, 10, 40, 0, time.UTC)
+var getAllReturnColumns = []string{"id", "created_at", "title", "year", "runtime", "genres", "version"}
+
 func newTestApplication(pool database.PgxIface) application {
 	cfg := config.ServiceConfig{Env: "test"}
 	mockModels := data.NewModels(pool)
@@ -372,7 +375,6 @@ func TestListHandlerServerError(t *testing.T) {
 	respRec := httptest.NewRecorder()
 	mockPool, err := pgxmock.NewPool()
 	assert.NilError(t, err)
-	getAllReturnColumns := []string{"id", "created_at", "title", "year", "runtime", "genres", "version"}
 	errorRows := pgxmock.NewRows(getAllReturnColumns).AddRow(1, time.Now(), "will error", 2026, 120, []string{}, 1).RowError(0, fmt.Errorf("network connection lost"))
 
 	mockPool.ExpectQuery("SELECT").WillReturnRows(errorRows)
@@ -391,7 +393,6 @@ func TestListHandlerValidation(t *testing.T) {
 	assert.NilError(t, err)
 	defer mockPool.Close()
 
-	getAllReturnColumns := []string{"id", "created_at", "title", "year", "runtime", "genres", "version"}
 	getAllQuery := `
 	SELECT id, created_at, title, year, runtime, genres, version
 	FROM movies
@@ -515,3 +516,115 @@ func TestListHandlerValidation(t *testing.T) {
 	}
 }
+
+// BAD! This passes because the filtering is done in the SQL database,
+// and since the database is mocked the test just gets back whatever
+// rows we load into the mock. This needs to be rewritten as an
+// integration test against a real database.
+func TestListMoviesFilters(t *testing.T) {
+
+	movies := []data.Movie{{
+		ID:        1337,
+		CreatedAt: defaultFixedTime,
+		Title:     "Batteries Not Included",
+		Year:      1987,
+		Runtime:   data.Runtime(120),
+		Genres:    []string{"family", "comedy"},
+		Version:   1,
+	}, {
+		ID:        1338,
+		CreatedAt: defaultFixedTime,
+		Title:     "The Boy and the Heron",
+		Year:      2023,
+		Runtime:   data.Runtime(140),
+		Genres:    []string{"animation", "drama"},
+		Version:   1,
+	}, {
+		ID:        1339,
+		CreatedAt: defaultFixedTime,
+		Title:     "The Boy and the robber",
+		Year:      2022,
+		Runtime:   data.Runtime(140),
+		Genres:    []string{"action", "drama"},
+		Version:   1,
+	},
+	}
+
+	testTable := []struct {
+		name           string
+		query          string
+		expectedMovies []data.Movie
+	}{
+		{
+			name:           "no filters",
+			query:          "/v1/movies",
+			expectedMovies: movies,
+		},
+		{
+			name:           "filter by genre drama",
+			query:          "/v1/movies?genre=drama",
+			expectedMovies: movies[1:],
+		},
+		{
+			name:           "filter by year",
+			query:          "/v1/movies?year=1987",
+			expectedMovies: []data.Movie{movies[0]},
+		},
+		{
+			name:           "filter by title",
+			query:          "/v1/movies?title=the+boy",
+			expectedMovies: movies[1:],
+		},
+		{
+			name:           "filter sort id ascending",
+			query:          "/v1/movies?sort=id",
+			expectedMovies: movies,
+		},
+		{
+			name:           "filter sort id descending",
+			query:          "/v1/movies?sort=-id",
+			expectedMovies: []data.Movie{movies[2], movies[1], movies[0]},
+		},
+		{
+			name:           "filter sort by year ascending",
+			query:          "/v1/movies?sort=year",
+			expectedMovies: []data.Movie{movies[0], movies[2], movies[1]},
+		},
+	}
+
+	for _, test := range testTable {
+		t.Run(test.name, func(t *testing.T) {
+			respRec := httptest.NewRecorder()
+			mockPool, err := pgxmock.NewPool()
+			assert.NilError(t, err)
+			defer mockPool.Close()
+
+			rows := pgxmock.NewRows(getAllReturnColumns)
+			for _, m := range test.expectedMovies {
+				rows.AddRow(m.ID, m.CreatedAt, m.Title, m.Year, m.Runtime, m.Genres, m.Version)
+			}
+
+			mockPool.ExpectQuery("SELECT").
+				WillReturnRows(rows)
+
+			r, err := http.NewRequest(http.MethodGet, test.query, nil)
+			assert.NilError(t, err)
+			app := newTestApplication(mockPool)
+			app.routes().ServeHTTP(respRec, r)
+
+			resp := respRec.Result()
+			assert.Equal(t, resp.StatusCode, http.StatusOK)
+			body, err := io.ReadAll(resp.Body)
+			assert.NilError(t, err)
+			defer resp.Body.Close()
+
+			jsonContent := make(map[string][]data.Movie)
+			err = json.Unmarshal(body, &jsonContent)
+			assert.NilError(t, err)
+
+			assert.Equal(t, len(jsonContent["movies"]), len(test.expectedMovies))
+			for i := range jsonContent["movies"] {
+				assert.MovieEqual(t, jsonContent["movies"][i], test.expectedMovies[i])
+			}
+		})
+	}
+}
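To make the concern in the comment above concrete: pgxmock matches ExpectQuery by regular expression against whatever SQL the handler generates, so the bare "SELECT" pattern accepts any movies query, filters included. A hypothetical demonstration of the resulting false positive (illustrative only, not part of this patch): seed a single comedy and the handler still answers 200 OK for ?genre=western.

func TestGenreFilterIsNotExercisedByTheMock(t *testing.T) {
	respRec := httptest.NewRecorder()
	mockPool, err := pgxmock.NewPool()
	assert.NilError(t, err)
	defer mockPool.Close()

	// The regexp "SELECT" matches the generated SQL regardless of the WHERE
	// clause the handler builds, so the genre filter in the URL below has no
	// effect on what the mock returns.
	seeded := data.Movie{
		ID:        1,
		CreatedAt: defaultFixedTime,
		Title:     "Not A Western",
		Year:      2020,
		Runtime:   data.Runtime(100),
		Genres:    []string{"comedy"},
		Version:   1,
	}
	rows := pgxmock.NewRows(getAllReturnColumns).
		AddRow(seeded.ID, seeded.CreatedAt, seeded.Title, seeded.Year, seeded.Runtime, seeded.Genres, seeded.Version)
	mockPool.ExpectQuery("SELECT").WillReturnRows(rows)

	r, err := http.NewRequest(http.MethodGet, "/v1/movies?genre=western", nil)
	assert.NilError(t, err)

	app := newTestApplication(mockPool)
	app.routes().ServeHTTP(respRec, r)

	// Still 200 OK with the comedy row in the body, even though no seeded
	// movie matches the requested genre.
	assert.Equal(t, respRec.Result().StatusCode, http.StatusOK)
}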
diff --git a/docker-compose.yaml b/docker-compose.yaml
index b861b0a..0ecdb4c 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -4,7 +4,7 @@ services:
     container_name: pulleydb
     environment:
       POSTGRES_USER: pulley
-      POSTGRES_PASSWORD: passwd
+      POSTGRES_PASSWORD: pulley
       POSTGRES_DB: pulley
     ports:
       - "5434:5432" # Host:Container
diff --git a/internal/testutil/db.go b/internal/testutil/db.go
new file mode 100644
index 0000000..3dcc90e
--- /dev/null
+++ b/internal/testutil/db.go
@@ -0,0 +1,128 @@
+package testutil
+
+import (
+	"context"
+	"database/sql"
+	"fmt"
+	"net/url"
+	"os"
+	"testing"
+	"time"
+
+	"git.runcible.io/learning/pulley/internal/config"
+	"git.runcible.io/learning/pulley/internal/database"
+	"git.runcible.io/learning/pulley/migrations"
+	"github.com/jackc/pgx/v5/pgxpool"
+	_ "github.com/jackc/pgx/v5/stdlib" // registers the "pgx" database/sql driver used by sql.Open below
+)
+
+// SetupTestDB provides a connection pool to a freshly created temporary database.
+//
+// Environment Variables:
+//   - PULLEY_INTEGRATION_DATABASE_URI: The connection string for the admin/maintenance DB.
+//     Defaults to: postgres://pulley:pulley@localhost:5434/postgres?sslmode=disable
+//   - PULLEY_ALLOW_REMOTE_INTEGRATION_TEST: Must be 'true' if the URI host is not 'localhost' or '127.0.0.1'.
+//
+// Safety:
+//
+// If the resolved URI points to a remote host and the safety flag is not enabled, the test
+// will fail immediately to prevent accidental execution against production or staging environments.
+//
+// Lifecycle:
+//
+// The function uses t.Cleanup to automatically close the pool and drop the temporary
+// database, ensuring no resource leaks between test runs.
+func SetupTestDB(t *testing.T) *pgxpool.Pool {
+	t.Helper()
+
+	ctxTimeout, cancel := context.WithTimeout(context.Background(), 10*time.Second)
+	defer cancel()
+
+	intDBUri, ok := os.LookupEnv("PULLEY_INTEGRATION_DATABASE_URI")
+	if !ok {
+		intDBUri = "postgres://pulley:pulley@localhost:5434/postgres?sslmode=disable"
+		t.Log("PULLEY_INTEGRATION_DATABASE_URI not set. Using localhost default")
+	}
+
+	// check for a non-local hostname
+	parsed, err := url.Parse(intDBUri)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if parsed.Hostname() != "localhost" && parsed.Hostname() != "127.0.0.1" {
+		if os.Getenv("PULLEY_ALLOW_REMOTE_INTEGRATION_TEST") != "true" {
+			t.Fatalf("Attempting to run integration tests against a non-local DB (%s). "+
+				"Set PULLEY_ALLOW_REMOTE_INTEGRATION_TEST=true to bypass.", parsed.Hostname())
+		}
+	}
+
+	// verify the admin connection
+	dbAdmin, err := sql.Open("pgx", intDBUri)
+	if err != nil {
+		t.Log("Hint: check if `make start-local` is running a postgres container")
+		t.Fatalf("Failed to connect to database: %s", err.Error())
+	}
+	t.Cleanup(func() {
+		dbAdmin.Close()
+	})
+
+	err = dbAdmin.PingContext(ctxTimeout)
+	if err != nil {
+		t.Log("Hint: check if `make start-local` is running a postgres container")
+		t.Fatalf("Failed to connect to database: %s", err)
+	}
+
+	// create and migrate a new database
+	testDbName := fmt.Sprintf("test_db_%d", time.Now().UnixNano())
+	t.Logf("Creating integration test database: %s", testDbName)
+	query := fmt.Sprintf("CREATE DATABASE %s", testDbName)
+
+	_, err = dbAdmin.ExecContext(ctxTimeout, query)
+	if err != nil {
+		t.Fatalf("Failed to create database %s: %s", testDbName, err)
+	}
+
+	testCreds := *parsed
+	testCreds.Path = fmt.Sprintf("/%s", testDbName)
+
+	// migrations
+	migrateDb, err := sql.Open("pgx", testCreds.String())
+	if err != nil {
+		t.Fatalf("Failed to open migrations connection to %s: %s", testDbName, err)
+	}
+	defer migrateDb.Close()
+
+	err = migrateDb.PingContext(ctxTimeout)
+	if err != nil {
+		t.Fatalf("Failed to connect to database: %s", err)
+	}
+
+	t.Logf("Migrating database %s", testDbName)
+	err = migrations.Migrate(migrateDb)
+	if err != nil {
+		t.Fatalf("Failed to migrate %s: %s", testDbName, err)
+	}
+	migrateDb.Close()
+
+	testPool, err := database.OpenPgPool(ctxTimeout, config.ServiceConfig{DatabaseUri: testCreds.String()})
+	if err != nil {
+		t.Fatalf("Failed to open connection pool to %s: %s", testDbName, err)
+	}
+
+	t.Cleanup(func() {
+		t.Logf("Ensuring test pool is closed")
+		testPool.Close()
+		// TODO: an env var check could be useful here if the DB needs to be
+		// inspected after a failure
+		t.Logf("Cleaning up database %s", testDbName)
+
+		ctxCleanup, cleanupCancel := context.WithTimeout(context.Background(), 5*time.Second)
+		defer cleanupCancel()
+
+		query := fmt.Sprintf("DROP DATABASE IF EXISTS %s WITH (FORCE)", testDbName)
+
+		_, err = dbAdmin.ExecContext(ctxCleanup, query)
+		if err != nil {
+			t.Errorf("Error dropping database %s: %s", testDbName, err)
+		}
+	})
+
+	return testPool
+}
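SetupTestDB hands back an empty but fully migrated database, so handler integration tests will need to seed rows themselves. A minimal sketch of a seeding helper (hypothetical, not part of this patch) that could sit next to SetupTestDB in internal/testutil; it assumes the movies table's id is generated by the database and supplies created_at explicitly rather than relying on a column default:

func SeedMovie(t *testing.T, pool *pgxpool.Pool, title string, year, runtime int32, genres []string) {
	t.Helper()

	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()

	// pgx maps []string onto the text[] genres column added by the migration below.
	_, err := pool.Exec(ctx,
		"INSERT INTO movies (created_at, title, year, runtime, genres) VALUES (now(), $1, $2, $3, $4)",
		title, year, runtime, genres)
	if err != nil {
		t.Fatalf("Failed to seed movie %q: %s", title, err)
	}
}

A test could then call testutil.SeedMovie(t, pool, "The Boy and the Heron", 2023, 140, []string{"animation", "drama"}) before issuing requests against app.routes().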
diff --git a/migrations/000001_create_movies_table.up.sql b/migrations/000001_create_movies_table.up.sql
index aef9d5c..312223d 100644
--- a/migrations/000001_create_movies_table.up.sql
+++ b/migrations/000001_create_movies_table.up.sql
@@ -4,7 +4,7 @@
     title text NOT NULL,
     year integer NOT NULL,
     runtime integer NOT NULL,
-
+    genres text[] NOT NULL,
     version integer NOT NULL DEFAULT 1
 );
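For reference, the genres column added above is a Postgres text[]. How the application's GetAll query filters on it is not shown in this patch, but as a rough illustration of querying a text[] column with pgx (standard Postgres ANY semantics, not code from this repo):

func moviesWithGenre(ctx context.Context, pool *pgxpool.Pool, genre string) ([]string, error) {
	// $1 = ANY (genres) keeps rows whose text[] column contains the requested genre.
	rows, err := pool.Query(ctx, "SELECT title FROM movies WHERE $1 = ANY (genres) ORDER BY id", genre)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	var titles []string
	for rows.Next() {
		var title string
		if err := rows.Scan(&title); err != nil {
			return nil, err
		}
		titles = append(titles, title)
	}
	return titles, rows.Err()
}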