diff --git a/2017/.gitignore b/2017/.gitignore
new file mode 100644
index 0000000..900eac3
--- /dev/null
+++ b/2017/.gitignore
@@ -0,0 +1,2 @@
+/.idea/*
+!/.idea/runConfigurations
diff --git a/2017/docker-compose.yml b/2017/docker-compose.yml
new file mode 100644
index 0000000..6f3e4d4
--- /dev/null
+++ b/2017/docker-compose.yml
@@ -0,0 +1,19 @@
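+# Local PostgreSQL instance for the 2017 solutions: host port 2017 maps to the container's 5432,
+# and the 2017 folder is mounted read-only at /aoc so the server can read the input files.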
+version: "3.9"
+
+services:
+  postgres:
+    image: postgres:15
+    container_name: "aoc-2017-postgres"
+    ports:
+      - "127.0.0.1:2017:5432"
+    volumes:
+      - postgres:/var/lib/postgresql/data
+      - .:/aoc:ro
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: aoc2017
+      POSTGRES_DB: postgres
+
+volumes:
+  postgres:
+    name: "aoc-2017-postgres"
diff --git a/2017/utils/procedures.sql b/2017/utils/procedures.sql
new file mode 100644
index 0000000..dcf0b4e
--- /dev/null
+++ b/2017/utils/procedures.sql
@@ -0,0 +1,45 @@
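+-- Drops every table in the current schema whose name starts with "<day>." (e.g. "01.input" and "01.output").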
+CREATE OR REPLACE PROCEDURE aoc_drop_tables(day TEXT) AS
+$$
+DECLARE
+	row RECORD;
+BEGIN
+	FOR row IN
+		SELECT table_name
+		FROM information_schema.tables
+		WHERE table_name LIKE (day || '.%') AND table_schema = CURRENT_SCHEMA()
+	LOOP
+		EXECUTE FORMAT('DROP TABLE %I CASCADE', row.table_name);
+	END LOOP;
+END
+$$ LANGUAGE plpgsql;
+
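+-- Recreates the tables for a day: "<day>.input" with the given column definitions, and "<day>.output" pre-seeded with one row per puzzle part.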
+CREATE OR REPLACE PROCEDURE aoc_setup_tables(day TEXT, input_columns TEXT) AS
+$$
+BEGIN
+	CALL aoc_drop_tables(day);
+	EXECUTE FORMAT('CREATE TABLE %I (%s)', day || '.input', input_columns);
+	EXECUTE FORMAT('CREATE TABLE %I (part INT, result TEXT NULL)', day || '.output');
+	EXECUTE FORMAT('INSERT INTO %I (part) VALUES (1), (2)', day || '.output');
+END
+$$ LANGUAGE plpgsql;
+
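+-- Returns the non-NULL rows of "<day>.output" formatted as 'Part <part> : <result>'.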
+CREATE OR REPLACE FUNCTION aoc_results(day TEXT)
+	RETURNS TABLE (
+		RESULT TEXT
+	)
+	STABLE
+	ROWS 2
+AS
+$$
+BEGIN
+	RETURN QUERY EXECUTE FORMAT('SELECT CONCAT(''Part '', part, '' : '', result) FROM %I WHERE result IS NOT NULL ORDER BY part', day || '.output');
+END
+$$ LANGUAGE plpgsql;
+
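+-- Sets up a day whose input is a plain text file, copying /aoc/<day>/input.txt into the single-column "<day>.input" table.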
+CREATE OR REPLACE PROCEDURE aoc_text_file(day TEXT) AS
+$$
+BEGIN
+	CALL aoc_setup_tables(day, 'input TEXT');
+	EXECUTE FORMAT('COPY %I FROM ''/aoc/%s/input.txt''', day || '.input', day);
+END
+$$ LANGUAGE plpgsql;
diff --git a/README.md b/README.md
index f9b2540..694b615 100644
--- a/README.md
+++ b/README.md
@@ -28,6 +28,28 @@ The `2020` folder contains a Cargo project (`Cargo.toml`) that sets up every day
 
 You should be able to load the Cargo project into [CLion](https://www.jetbrains.com/clion/).
 
+## \[2017\] PostgreSQL
+
+The `2017` folder contains a Docker Compose file (`docker-compose.yml`) that launches a local PostgreSQL instance on `127.0.0.1:2017` with the username `postgres` and password `aoc2017`. The container has the `2017` folder mounted read-only at `/aoc`, so that PostgreSQL can read the input files.
+
+To start the Docker container, enter the `2017` folder and run `docker compose up -d`. To stop and remove the Docker container and its data, run `docker compose down -v`.
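+
+If you have a `psql` client installed on the host (optional; the commands below only require Docker), you can also connect to the running instance directly, using the password `aoc2017` when prompted:
+```
+psql -h 127.0.0.1 -p 2017 -U postgres postgres
+```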
+
+You can execute the script for each day and get its output by running the following commands. See [psql](https://www.postgresql.org/docs/current/app-psql.html) for documentation of the arguments and flags passed to the `psql` program.
+```
+# First, execute procedures.sql to set up procedures for turning input files into tables.
+docker exec aoc-2017-postgres psql postgres postgres -f /aoc/utils/procedures.sql
+
+# Replace <day> with the specific day you want to run.
+docker exec aoc-2017-postgres psql postgres postgres -Atqf /aoc/<day>/main.sql
+
+# For example:
+docker exec aoc-2017-postgres psql postgres postgres -Atqf /aoc/01/main.sql
+```
+
+**Every day's script begins by dropping all tables whose name begins with that day.** Don't execute these scripts on any database you care about.
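+
+No day's `main.sql` is shown here, but given the procedures in `utils/procedures.sql` it can take roughly the shape sketched below. This is only an illustration: the `UPDATE` is a placeholder "solution" (it just counts input lines), not the contents of any actual day's script.
+```
+-- Hypothetical /aoc/01/main.sql
+CALL aoc_text_file('01');  -- recreates "01.input" and "01.output", then loads /aoc/01/input.txt
+
+UPDATE "01.output"
+SET result = (SELECT COUNT(*) FROM "01.input")::TEXT  -- placeholder: a real script computes the puzzle answer here
+WHERE part = 1;
+
+SELECT * FROM aoc_results('01');  -- prints each non-NULL answer, e.g. "Part 1 : ..."
+```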
+
+You should be able to load the `2017` folder into [DataGrip](https://www.jetbrains.com/datagrip/), where you can attach the PostgreSQL data source, execute the scripts, and explore the tables created in the process.
+
 ## \[2015\] NASM x64 Assembly
 
 The `2015` folder contains a CMake project (`CMakeLists.txt`), which sets up every day as a CMake subproject.