Compare commits


53 Commits

Author SHA1 Message Date
Alphonse Paix
6d2d486866 Responsive navbar
2025-09-20 05:05:49 +02:00
Alphonse Paix
40dfe1aed8 Templates refactoring
2025-09-20 04:43:55 +02:00
Alphonse Paix
b52b676dc0 Error handling refactor and 500 page/message templates 2025-09-20 04:06:48 +02:00
Alphonse Paix
f5cd91108a Refactor admin routes to use new AppError struct in responses 2025-09-20 01:08:05 +02:00
Alphonse Paix
01d2add44b Askama message template 2025-09-20 00:51:46 +02:00
Alphonse Paix
46a6905b12 Logo in the navbar
2025-09-20 00:20:58 +02:00
Alphonse Paix
91e8b5f001 Templates adjustments
2025-09-19 20:07:38 +02:00
Alphonse Paix
a75c410948 404 page 2025-09-19 19:34:06 +02:00
Alphonse Paix
95c4d3fdd0 Posts dedicated page with cards linking to specific post 2025-09-19 01:04:10 +02:00
Alphonse Paix
71d4872878 Refactor test suite to handle 303 See Other 2025-09-18 21:00:01 +02:00
Alphonse Paix
3120c700a4 query metadata 2025-09-18 20:09:03 +02:00
Alphonse Paix
08d5f611b5 Brought back newsletter form on admin page 2025-09-18 18:40:03 +02:00
Alphonse Paix
54218f92a9 Admin can now write posts
Posts can be displayed on the website. Subscribers are automatically
notified by email. This makes it possible to track explicitly how many
people followed the link included in the emails, without being intrusive
(no invisible tracking image).
2025-09-18 17:22:33 +02:00
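A minimal sketch of the click-tracking approach described above, assuming an axum handler and the notifications_delivered table whose queries appear in the diffs below; the route shape, extractor names and redirect target are assumptions, not the repository's actual code:

    // Hypothetical tracking route: the link placed in each email points here,
    // the click is recorded, then the reader is redirected to the post.
    use axum::{extract::{Path, State}, response::Redirect};
    use sqlx::PgPool;
    use uuid::Uuid;

    async fn track_and_redirect(
        State(pool): State<PgPool>,
        Path((email_id, post_id)): Path<(Uuid, Uuid)>,
    ) -> Redirect {
        // Mark this delivery as opened; a tracking failure must never block
        // the reader, so the error is deliberately ignored.
        let _ = sqlx::query!(
            "UPDATE notifications_delivered SET opened = TRUE WHERE email_id = $1",
            email_id
        )
        .execute(&pool)
        .await;
        Redirect::to(&format!("/posts/{post_id}"))
    }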
Alphonse Paix
044991d623 Fix redirect issues
The dashboard button now correctly redirects to the login page when the
user is not logged in, and the login page redirects to the dashboard when
the user is already logged in.
2025-09-17 23:57:45 +02:00
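A minimal sketch of the two redirect guards described above, assuming tower-sessions (listed in Cargo.toml further down) with a "user_id" session key; the key name and the /login and /admin/dashboard paths are assumptions:

    use axum::response::{IntoResponse, Redirect};
    use tower_sessions::Session;
    use uuid::Uuid;

    // The dashboard requires a logged-in user, otherwise bounce to the login page.
    async fn dashboard(session: Session) -> impl IntoResponse {
        match session.get::<Uuid>("user_id").await {
            Ok(Some(_)) => "dashboard page".into_response(),
            _ => Redirect::to("/login").into_response(),
        }
    }

    // The login page does the opposite: an authenticated user is sent straight
    // to the dashboard instead of seeing the form again.
    async fn login_form(session: Session) -> impl IntoResponse {
        match session.get::<Uuid>("user_id").await {
            Ok(Some(_)) => Redirect::to("/admin/dashboard").into_response(),
            _ => "login form".into_response(),
        }
    }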
Alphonse Paix
72d0306e35 Update README
2025-09-17 14:35:39 +02:00
Alphonse Paix
e191d35664 Formatting 2025-09-17 14:24:45 +02:00
Alphonse Paix
b5f0f448d7 Test suite refactoring to match new htmx HTML swapping in pages 2025-09-17 14:16:27 +02:00
Alphonse Paix
859247d900 HX-Redirect to handle redirections with htmx 2025-09-17 13:16:56 +02:00
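For context on the commit above: when the server answers an htmx request with an ordinary 3xx, the XHR follows it and the result is swapped into the target element instead of navigating, so the server sets an HX-Redirect header on a 200 response and htmx performs the full client-side redirect itself. A minimal illustration (not the repository's handler; the helper name is made up):

    use axum::http::{HeaderMap, HeaderValue, StatusCode};

    // Returning this tuple from a handler yields a 200 response whose
    // HX-Redirect header tells htmx to navigate the whole page to `location`.
    fn hx_redirect(location: &str) -> (StatusCode, HeaderMap) {
        let mut headers = HeaderMap::new();
        headers.insert(
            "HX-Redirect",
            HeaderValue::from_str(location).expect("valid header value"),
        );
        (StatusCode::OK, headers)
    }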
Alphonse Paix
2d336ed000 Use HTML swap to display success and error messages 2025-09-17 03:40:23 +02:00
Alphonse Paix
88dad022ce Basic dashboard for newsletter issue and password systems 2025-09-17 01:47:03 +02:00
Alphonse Paix
1d027b5460 htmx and Tailwind CSS production setup 2025-09-16 20:30:34 +02:00
Alphonse Paix
38208654dc Run on port 8080 for local env + minor fix for subscription confirm page 2025-09-16 19:09:11 +02:00
Alphonse Paix
0a2c4a32c1 Update README
2025-09-16 16:53:45 +02:00
Alphonse Paix
b736e2fe8d Confirmation page and minor improvements to homepage and form messages
Basic redirects with flash messages for success and error states
2025-09-16 16:47:28 +02:00
Alphonse Paix
f948728348 Merge remote-tracking branch 'origin/main' into askama 2025-09-16 15:25:28 +02:00
Alphonse Paix
5cdc3ea29d Remove name from subscriptions table 2025-09-16 15:24:08 +02:00
Alphonse Paix
56035fab30 Askama + htmx for frontend
Server-side rendering with htmx; Tailwind CSS for styling
2025-09-16 01:47:18 +02:00
Alphonse Paix
a9c6cb36a5 Update config to use self-hosted email server 2025-09-14 19:29:02 +00:00
Alphonse Paix
ba6b2dbd93 Better datastores security
Ports are now exposed on localhost only, and the local Postgres instance
uses a stronger password.
2025-09-07 12:46:27 +02:00
Alphonse Paix
1ec51f0caf Update workflow to include redis 2025-09-05 19:23:11 +02:00
Alphonse Paix
54b0512f3f Use env vars for TLS files 2025-09-05 18:27:46 +02:00
Alphonse Paix
8d6cab41d0 Support for TLS encryption 2025-09-05 18:13:35 +02:00
Alphonse Paix
a4104ca1b2 Register form and confirmation messages 2025-09-04 23:39:53 +02:00
Alphonse Paix
f8dee295cd Fault-tolerant delivery system 2025-09-04 02:54:49 +02:00
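The delivery queue behind this commit appears in the cached sqlx queries further down (issue_delivery_queue, fetched with FOR UPDATE SKIP LOCKED LIMIT 1). A hedged sketch of what one worker iteration could look like; only that SELECT is taken from the diff, while the DELETE, the email-sending step and the function name are assumptions:

    use sqlx::PgPool;

    // Claim one task inside a transaction: SKIP LOCKED lets several workers
    // run concurrently, and a crash before commit simply releases the row.
    async fn try_deliver_one(pool: &PgPool) -> Result<bool, sqlx::Error> {
        let mut tx = pool.begin().await?;
        let task = sqlx::query!(
            "SELECT newsletter_issue_id, subscriber_email
             FROM issue_delivery_queue
             FOR UPDATE SKIP LOCKED
             LIMIT 1"
        )
        .fetch_optional(&mut *tx)
        .await?;
        let Some(task) = task else { return Ok(false) };
        // Send the email here; on success, drop the task so it is not retried.
        sqlx::query!(
            "DELETE FROM issue_delivery_queue
             WHERE newsletter_issue_id = $1 AND subscriber_email = $2",
            task.newsletter_issue_id,
            task.subscriber_email
        )
        .execute(&mut *tx)
        .await?;
        tx.commit().await?;
        Ok(true)
    }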
Alphonse Paix
9a184b93ac Authentication and form for newsletter publishing 2025-09-01 15:47:27 +02:00
Alphonse Paix
d96a401d99 Admin dashboard and sessions 2025-09-01 03:08:43 +02:00
Alphonse Paix
3dce578ba0 Flash messages using axum-messages 2025-08-30 01:39:12 +02:00
Alphonse Paix
8447d050d6 Handler to send emails to confirmed subscribers 2025-08-27 12:14:11 +02:00
Alphonse Paix
9193f2020d Error handling with thiserror and anyhow 2025-08-26 12:47:22 +02:00
Alphonse Paix
4ce25a8136 Fix send email request body 2025-08-25 18:51:37 +02:00
Alphonse Paix
dfd3300371 Query metadata 2025-08-25 17:52:44 +02:00
Alphonse Paix
d1cf1f6c4f Confirm subscription endpoint 2025-08-25 17:46:03 +02:00
Alphonse Paix
73ff7c04fe Query metadata, migrations and formatting 2025-08-24 12:45:32 +02:00
Alphonse Paix
954772e9db Set status to 'confirmed' for new subscribers 2025-08-24 11:43:08 +02:00
Alphonse Paix
4389873bf4 Email client, application startup logic and tests 2025-08-24 11:31:03 +02:00
Alphonse Paix
85ab04f254 Parse data from incoming request 2025-08-23 11:13:57 +02:00
Alphonse Paix
4d049a744a Fix bug when reading environment variables 2025-08-22 16:29:11 +02:00
Alphonse Paix
a7473bb7f5 Environment variables at runtime to connect to database 2025-08-22 16:01:20 +02:00
Alphonse Paix
1567f94b1f Docker for deployment 2025-08-22 14:25:34 +02:00
Alphonse Paix
59817083eb Update GitHub workflow 2025-08-22 08:44:02 +02:00
Alphonse Paix
b280f10c40 Fix incorrect database query in test suite 2025-08-22 08:28:22 +02:00
Alphonse Paix
5cc5758097 Telemetry 2025-08-22 08:14:59 +02:00
Alphonse Paix
ded2a611e2 Database connection and user registration 2025-08-21 15:38:12 +02:00
165 changed files with 1432 additions and 7554 deletions

View File

@@ -1,3 +1,3 @@
[target.x86_64-unknown-linux-gnu]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]
rustflags = ["-C", "link-arg=-fuse-ld=/usr/bin/mold"]

View File

@@ -4,9 +4,3 @@
Dockerfile
/scripts
/migrations
/node_modules
/assets/css/main.css
/.github
README.md
/tests
/configuration/local.yaml

.env (new file, 1 changed line)
View File

@@ -0,0 +1 @@
DATABASE_URL="postgres://postgres:Jq09NF6Y8ZXJS4jd9c8U@localhost:5432/newsletter"

View File

@@ -1,6 +1,11 @@
# The name of your workflow. GitHub displays the names of your workflows on your repository's "Actions" tab
name: Rust
# To automatically trigger the workflow
on:
# NB: this differs from the book's project!
# These settings allow us to run this specific CI pipeline for PRs against
# this specific branch (a.k.a. book chapter).
push:
branches:
- main
@@ -13,51 +18,74 @@ env:
CARGO_TERM_COLOR: always
SQLX_VERSION: 0.8.6
SQLX_FEATURES: "rustls,postgres"
DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
APP_DATABASE__HOST: postgres
APP_KV_STORE__HOST: redis
APP_USER: app
APP_USER_PWD: secret
APP_DB_NAME: newsletter
# A workflow run is made up of one or more jobs, which run in parallel by default
# Each job runs in a runner environment specified by runs-on
jobs:
# Unique identifier of our job (`job_id`)
test:
# Sets the name `Test` for the job, which is displayed in the GitHub UI
name: Test
# Containers must run in Linux based operating systems
runs-on: ubuntu-latest
# Service containers to run alongside the `test` container job
services:
# Label used to access the service container
postgres:
# Docker Hub image
image: postgres
# Environment variables scoped only for the `postgres` element
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_DB: newsletter
POSTGRES_DB: postgres
# When you map ports using the ports keyword, GitHub uses the --publish command to publish the containers ports to the Docker host
# Opens tcp port 5432 on the host and service container
ports:
- 15432:5432
- 5432:5432
redis:
image: redis
image: redis:7
ports:
- 16379:6379
- 6379:6379
steps:
# Downloads a copy of the code in your repository before running CI tests
- name: Check out repository code
# The uses keyword specifies that this step will run v4 of the actions/checkout action.
# This is an action that checks out your repository onto the runner, allowing you to run scripts or other actions against your code (such as build and test tools).
# You should use the checkout action any time your workflow will run against the repository's code.
uses: actions/checkout@v4
- name: Install mold linker
run: |
sudo apt-get update
sudo apt-get install -y mold clang
# This GitHub Action installs a Rust toolchain using rustup. It is designed for one-line concise usage and good defaults.
# It also takes care of caching intermediate build artifacts.
- name: Install the Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
cache: false
- name: Install sqlx-cli
run: cargo install sqlx-cli
--version=${{ env.SQLX_VERSION }}
--features ${{ env.SQLX_FEATURES }}
--no-default-features
--locked
- name: Create app user in Postgres
run: |
sudo apt-get install postgresql-client
# Create the application user
CREATE_QUERY="CREATE USER ${APP_USER} WITH PASSWORD '${APP_USER_PWD}';"
PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${CREATE_QUERY}"
# Grant create db privileges to the app user
GRANT_QUERY="ALTER USER ${APP_USER} CREATEDB;"
PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${GRANT_QUERY}"
- name: Migrate database
run: cargo sqlx migrate run
run: SKIP_DOCKER=true ./scripts/init_db.sh
- name: Run tests
run: TEST_LOG=true cargo test
run: cargo test
- name: Check that queries are fresh
run: cargo sqlx prepare --check --workspace
# `fmt` container job
fmt:
name: Rustfmt
runs-on: ubuntu-latest
@@ -67,30 +95,31 @@ jobs:
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: rustfmt
cache: false
- name: Enforce formatting
run: cargo fmt --check
# `clippy` container job
clippy:
name: Clippy
runs-on: ubuntu-latest
env:
# This environment variable forces sqlx to use its offline mode,
# which means that it will not attempt to connect to a database
# when running the tests. It'll instead use the cached query results.
# We check that the cached query results are up-to-date in another job,
# to speed up the overall CI pipeline.
# This will all be covered in detail in chapter 5.
SQLX_OFFLINE: true
steps:
- name: Check out repository code
uses: actions/checkout@v4
- name: Install mold linker
run: |
sudo apt-get update
sudo apt-get install -y mold clang
- uses: actions/checkout@v4
- name: Install the Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: clippy
cache: false
- name: Linting
run: cargo clippy -- -D warnings
# `coverage` container job
coverage:
name: Code coverage
runs-on: ubuntu-latest
@@ -100,35 +129,45 @@ jobs:
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_DB: newsletter
POSTGRES_DB: postgres
ports:
- 15432:5432
- 5432:5432
redis:
image: redis
image: redis:7
ports:
- 16379:6379
- 6379:6379
steps:
- uses: actions/checkout@v4
- name: Install mold linker
run: |
sudo apt-get update
sudo apt-get install -y mold clang
- name: Install the Rust toolchain
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
components: llvm-tools-preview
cache: false
- name: Install sqlx-cli
run: cargo install sqlx-cli
--version=${{ env.SQLX_VERSION }}
--features ${{ env.SQLX_FEATURES }}
--no-default-features
--locked
- name: Create app user in Postgres
run: |
sudo apt-get install postgresql-client
# Create the application user
CREATE_QUERY="CREATE USER ${APP_USER} WITH PASSWORD '${APP_USER_PWD}';"
PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${CREATE_QUERY}"
# Grant create db privileges to the app user
GRANT_QUERY="ALTER USER ${APP_USER} CREATEDB;"
PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${GRANT_QUERY}"
- name: Migrate database
run: cargo sqlx migrate run
run: SKIP_DOCKER=true ./scripts/init_db.sh
- name: Install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
- name: Generate code coverage
run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
- name: Generate report
run: cargo llvm-cov report --html --output-dir coverage
- uses: actions/upload-artifact@v4
with:
name: "Coverage report"
path: coverage/

.gitignore (vendored, 4 changed lines)
View File

@@ -1,6 +1,2 @@
/target
/node_modules
.env
/.idea
docker-compose.yml

View File

@@ -1,18 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO comments (user_id, comment_id, post_id, author, content)\n VALUES ($1, $2, $3, $4, $5)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Uuid",
"Uuid",
"Text",
"Text"
]
},
"nullable": []
},
"hash": "02fff619c0ff8cb4f9946991be0ce795385b9e6697dcaa52f915acdbb1460e65"
}

View File

@@ -1,64 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.post_id, p.author_id, u.username AS author, u.full_name,\n p.title, p.content, p.published_at, last_modified\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n WHERE p.post_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
true
]
},
"hash": "059162eba48cf5f519d0d8b6ce63575ced91941b8c55c986b8c5591c7d9b09e4"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM notifications_delivered WHERE opened = TRUE",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "06f07a7522f3ee8e2cdfe5a7988a46f9a2598aa9c0618d00f6287978d5ce28ca"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT newsletter_issue_id, subscriber_email, unsubscribe_token, kind\n FROM issue_delivery_queue\n FOR UPDATE\n SKIP LOCKED\n LIMIT 1\n ",
"query": "\n SELECT newsletter_issue_id, subscriber_email\n FROM issue_delivery_queue\n FOR UPDATE\n SKIP LOCKED\n LIMIT 1\n ",
"describe": {
"columns": [
{
@@ -12,27 +12,15 @@
"ordinal": 1,
"name": "subscriber_email",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "unsubscribe_token",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "kind",
"type_info": "Text"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "3b79eca713fe7e167578537399436f5cb1171a7e89c398e005ad41ee12aaf91f"
"hash": "06f83a51e9d2ca842dc0d6947ad39d9be966636700de58d404d8e1471a260c9a"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE subscriptions SET status = 'confirmed', unsubscribe_token = $1 WHERE id = $2",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Uuid"
]
},
"nullable": []
},
"hash": "0f552668ea90475e1877425d51727cfe38a9d93571283aa33e8267b42e117e6e"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM idempotency\n WHERE created_at < NOW() - INTERVAL '1 hour'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": []
},
"nullable": []
},
"hash": "1e1a90042e89bd8662df3bae15bc7506146cff102034664c77ab0fc68b9480f5"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT\n response_status_code as \"response_status_code!\",\n response_headers as \"response_headers!: Vec<HeaderPairRecord>\",\n response_body as \"response_body!\"\n FROM idempotency\n WHERE idempotency_key = $1\n ",
"query": "\n SELECT\n response_status_code as \"response_status_code!\",\n response_headers as \"response_headers!: Vec<HeaderPairRecord>\",\n response_body as \"response_body!\"\n FROM idempotency\n WHERE\n user_id = $1\n AND idempotency_key = $2\n ",
"describe": {
"columns": [
{
@@ -44,6 +44,7 @@
],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
@@ -53,5 +54,5 @@
true
]
},
"hash": "74d92b078198c3f73edc272c788249b14b62c59365d745d6a2e314cd9c5db1e9"
"hash": "1fc498c8ccbf46f3e00b915e3b3973eb8d44a83a7df6dd7744dc56a2e94a0aa5"
}

View File

@@ -1,64 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.author_id, u.username as author, u.full_name,\n p.post_id, p.title, p.content, p.published_at, p.last_modified\n FROM posts p\n INNER JOIN users u ON p.author_id = u.user_id\n WHERE p.author_id = $1\n ORDER BY p.published_at DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false,
true
]
},
"hash": "1fc92c14786c21d24951341e3a8149964533b7627d2d073eeac7b7d3230513ce"
}

View File

@@ -1,44 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, password_hash, role as \"role: Role\"\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "password_hash",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "22c9449522dcf495d9f49c16ca433aa07a0d1daae4884789ba1e36a918e7dfd1"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO idempotency (idempotency_key, created_at)\n VALUES ($1, now())\n ON CONFLICT DO NOTHING\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "3124db53d9e1fe0701a2fc70eea98e001fef4b75c24d33d8dd595f6b483e8f65"
}

View File

@@ -1,10 +1,11 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE idempotency\n SET\n response_status_code = $2,\n response_headers = $3,\n response_body = $4\n WHERE idempotency_key = $1\n ",
"query": "\n UPDATE idempotency\n SET\n response_status_code = $3,\n response_headers = $4,\n response_body = $5\n WHERE\n user_id = $1\n AND idempotency_key = $2\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text",
"Int2",
{
@@ -36,5 +37,5 @@
},
"nullable": []
},
"hash": "b64d5c2e51f328effc8f4687066db96ad695c575fb66195febcdf95c1539a153"
"hash": "32701e61ea14e25608b5f6b05289d08d422e9629d6aee98ac1dcbd50f1edbfe1"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM posts",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "3d7376ca79ffd159830fc6d43042d5fe761b6d330924bde7c5fc0f17f533def9"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM notifications_delivered",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "3f4aceeab03c1c7352d6bed39d397e17d1fc934015d53754f9b0055c4701ee21"
}

View File

@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO idempotency (user_id, idempotency_key, created_at)\n VALUES ($1, $2, now())\n ON CONFLICT DO NOTHING\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": []
},
"hash": "409cb2c83e34fba77b76f031cb0846a8f2716d775c3748887fb0c50f0e0a565b"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM subscriptions WHERE id = $1 RETURNING email",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "email",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "4141df8c45db179016d8e87b023b572bec7e04a6f3324aa17de7e7a9b1fb32ef"
}

View File

@@ -1,25 +1,20 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT newsletter_issue_id, title, text_content, html_content\n FROM newsletter_issues\n WHERE newsletter_issue_id = $1\n ",
"query": "\n SELECT title, text_content, html_content\n FROM newsletter_issues\n WHERE newsletter_issue_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "newsletter_issue_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 2,
"ordinal": 1,
"name": "text_content",
"type_info": "Text"
},
{
"ordinal": 3,
"ordinal": 2,
"name": "html_content",
"type_info": "Text"
}
@@ -30,11 +25,10 @@
]
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "f682b1791fb9871c5f7416711caf32637d6303b2c166ef89e7f725b309d2219f"
"hash": "43116d4e670155129aa69a7563ddc3f7d01ef3689bb8de9ee1757b401ad95b46"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO issue_delivery_queue (\n newsletter_issue_id,\n subscriber_email,\n unsubscribe_token,\n kind\n )\n SELECT $1, email, unsubscribe_token, $2\n FROM subscriptions\n WHERE status = 'confirmed'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": []
},
"hash": "5d9039a01feaca50218a1c791439b2bd3817582798027c00d59d43089531ecc0"
}

View File

@@ -1,62 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, role as \"role: Role\", full_name, bio, member_since\n FROM users\n WHERE user_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "bio",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "member_since",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
true,
true,
false
]
},
"hash": "601884180bc841dc0762008a819218620fc05169fe3bb80b7635fbe9e227056b"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM subscriptions",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "68a00cae18e40dc76ffea61dfc0ea84d8cb09502b24c11dbb8d403419899dfd1"
}

View File

@@ -1,60 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, role as \"role: Role\", full_name, bio, member_since\n FROM users\n ORDER BY member_since DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "bio",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "member_since",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
true,
true,
false
]
},
"hash": "73dbf3fb780272b1849cd8aa2ecfb59774b1c46bf52181b6298eebccbc86e438"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT login_time FROM user_logins\n WHERE user_id = $1\n ORDER BY login_time DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "login_time",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "769e8762bd2173c088d85fc132326b05a08e67092eac4c3a7aff8a49d086b5a0"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM subscriptions\n WHERE status = 'pending_confirmation'\n AND subscribed_at < NOW() - INTERVAL '24 hours'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": []
},
"nullable": []
},
"hash": "7eccf0027753bc1c42897aef12c9350eca023f3be52e24530127d06c3c449104"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT author_id FROM posts WHERE post_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "author_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "84fcada696e1be5db55ef276e120ffef9adf7f5a4f5c4d5975b85e008e15620b"
}

View File

@@ -1,27 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO users (user_id, username, password_hash, role)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text",
"Text",
{
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
]
},
"nullable": []
},
"hash": "878036fa48e738387e4140d5dc7eccba477794a267f2952aab684028b7c6e286"
}

View File

@@ -1,59 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT c.user_id as \"user_id?\", u.username as \"username?\", c.comment_id, c.post_id, c.author, c.content, c.published_at\n FROM comments c\n LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL\n ORDER BY published_at DESC\n LIMIT $1\n OFFSET $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id?",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username?",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "comment_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
true,
false,
false,
false,
true,
false,
false
]
},
"hash": "886de678764ebf7f96fe683d3b685d176f0a41043c7ade8b659a9bd167a2d063"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT unsubscribe_token FROM subscriptions WHERE email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "unsubscribe_token",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
true
]
},
"hash": "8d72bcc059606a15aef7e3c2455b9cc44427356b4ab772f0f1fb3dfd318c4561"
}

View File

@@ -1,17 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE users\n SET username = $1, full_name = $2, bio = $3\n WHERE user_id = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Uuid"
]
},
"nullable": []
},
"hash": "8dc27ae224c7ae3c99c396302357514d66e843dc4b3ee4ab58c628b6c9797fdd"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM subscriptions WHERE status = 'confirmed'",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "95a6533f617e7bae589b00548c73425b2991237b8c823dd7c863e6dad002d4b6"
}

View File

@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO issue_delivery_queue (\n newsletter_issue_id,\n subscriber_email\n )\n SELECT $1, email\n FROM subscriptions\n WHERE status = 'confirmed'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "9bfa261067713ca31b191c9f9bcf19ae0dd2d12a570ce06e8e2abd72c5d7b42d"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE notifications_delivered SET opened = TRUE WHERE email_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "9fc831553927814e21dd2aa4ff92d06c32e318c7536918d5adbaf5eaf5777e3d"
}

View File

@@ -1,47 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT * FROM subscriptions ORDER BY subscribed_at DESC LIMIT $1 OFFSET $2",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "email",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "subscribed_at",
"type_info": "Timestamptz"
},
{
"ordinal": 3,
"name": "status",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "unsubscribe_token",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
false,
true
]
},
"hash": "a6cb227efa5ac12189e662d68b8dcc39032f308f211f603dfcf539b7b071b8e3"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM users WHERE user_id = $1",
"query": "UPDATE subscriptions SET status = 'confirmed' WHERE id = $1",
"describe": {
"columns": [],
"parameters": {
@@ -10,5 +10,5 @@
},
"nullable": []
},
"hash": "dfa520877c017cd5808d02c24ef2d71938b68093974f335a4d89df91874fdaa2"
"hash": "a71a1932b894572106460ca2e34a63dc0cb8c1ba7a70547add1cddbb68133c2b"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT id FROM subscriptions WHERE email = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "aa7e732d453403819a489e1a4ac5c56cd3b57bc882c8b1e96a887811f8f999cd"
}

View File

@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, password_hash\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "password_hash",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false
]
},
"hash": "acf1b96c82ddf18db02e71a0e297c822b46f10add52c54649cf599b883165e58"
}

View File

@@ -1,17 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE posts\n SET title = $1, content = $2, last_modified = $3 WHERE post_id = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Timestamptz",
"Uuid"
]
},
"nullable": []
},
"hash": "aef1e780d14be61aa66ae8771309751741068694b291499ee1371de693c6a654"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM posts WHERE post_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "b47161386b21432693aa3827963e8167c942e395687cd5ffecb7c064ca2dde70"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM subscriptions WHERE unsubscribe_token = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "ba8d4af43c5654ecce5e396a05681249a28bdcff206d4972f53c8cbd837f8acf"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM comments WHERE post_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "bd08bf95dc1c8c0c7678bc509df7ce776e839846f29981e2e0bdfd382de9370f"
}

View File

@@ -1,28 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT username, full_name FROM users WHERE user_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "full_name",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true
]
},
"hash": "bfd02c92fb5e0c8748b172bf59a77a477b432ada1f41090571f4fe0e685b1b1b"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM comments WHERE comment_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "caf9f2603db6bc8b715cad188501c12f5de5fae49cd04271471f1337a3232f58"
}

View File

@@ -1,65 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.post_id, p.author_id, u.username AS author, u.full_name,\n p.title, p.content, p.published_at, p.last_modified\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n ORDER BY p.published_at DESC\n LIMIT $1\n OFFSET $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
true
]
},
"hash": "dc3c1b786b4f4bd65f625922ce05eab4cb161f3de6c6e676af778f7749af5710"
}

View File

@@ -1,62 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, full_name, role as \"role: Role\", member_since, bio\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 4,
"name": "member_since",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
true,
false,
false,
true
]
},
"hash": "e049f4db1020c0a2979d5ee3c1c0519de59eee8594eb2e472877e5db6bf25271"
}

View File

@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM comments",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "e056c3230c1ccd1b3b62e902f49a41f21213e0f7da92b428065986d380676034"
}

View File

@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT user_id FROM users WHERE username = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "f4ea2ad9ba4f26093152e4a0e008ef6c3114fbe9e51301611c5633e1cc944c05"
}

View File

@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO notifications_delivered (email_id, newsletter_issue_id)\n VALUES ($1, $2)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Uuid"
]
},
"nullable": []
},
"hash": "f8afa9b469bf8c216c5855e1d6b7ee05281c9e7779f8fd6486780f882f46e385"
}

View File

@@ -1,60 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT c.user_id as \"user_id?\", u.username as \"username?\", c.comment_id, c.post_id, c.author, c.content, c.published_at\n FROM comments c\n LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL\n WHERE c.post_id = $1\n ORDER BY c.published_at DESC\n LIMIT $2\n OFFSET $3\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id?",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username?",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "comment_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid",
"Int8",
"Int8"
]
},
"nullable": [
true,
false,
false,
false,
true,
false,
false
]
},
"hash": "fb280849a8a1fce21ec52cd9df73492d965357c9a410eb3b43b1a2e1cc8a0259"
}

View File

@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO user_logins (user_id) VALUES ($1)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "fc383671ada951baa611ab7dd00efcc7f4f2aea7c22e4c0865e5c766ed7f99b3"
}

Cargo.lock (generated, 422 changed lines)
View File

@@ -17,6 +17,19 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"getrandom 0.3.3",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.3"
@@ -227,6 +240,28 @@ dependencies = [
"syn",
]
[[package]]
name = "axum-server"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "495c05f60d6df0093e8fb6e74aa5846a0ad06abaf96d76166283720bf740f8ab"
dependencies = [
"arc-swap",
"bytes",
"fs-err",
"http",
"http-body",
"hyper",
"hyper-util",
"pin-project-lite",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
]
[[package]]
name = "backtrace"
version = "0.3.75"
@@ -529,29 +564,6 @@ dependencies = [
"typenum",
]
[[package]]
name = "cssparser"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e901edd733a1472f944a45116df3f846f54d37e67e68640ac8bb69689aca2aa"
dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf",
"smallvec",
]
[[package]]
name = "cssparser-macros"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "darling"
version = "0.20.11"
@@ -626,26 +638,6 @@ dependencies = [
"serde",
]
[[package]]
name = "derive_more"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "deunicode"
version = "1.6.2"
@@ -699,27 +691,6 @@ version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dtoa"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04"
[[package]]
name = "dtoa-short"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87"
dependencies = [
"dtoa",
]
[[package]]
name = "ego-tree"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2972feb8dffe7bc8c5463b1dacda1b0dfbed3710e50f977d965429692d74cd8"
[[package]]
name = "either"
version = "1.15.0"
@@ -876,13 +847,13 @@ dependencies = [
]
[[package]]
name = "futf"
version = "0.1.5"
name = "fs-err"
version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843"
checksum = "88d7be93788013f265201256d58f04936a8079ad5dc898743aa20525f503b683"
dependencies = [
"mac",
"new_debug_unreachable",
"autocfg",
"tokio",
]
[[package]]
@@ -985,15 +956,6 @@ dependencies = [
"slab",
]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]]
name = "generic-array"
version = "0.14.7"
@@ -1005,12 +967,13 @@ dependencies = [
]
[[package]]
name = "getopts"
version = "0.2.24"
name = "gethostname"
version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df"
checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e"
dependencies = [
"unicode-width",
"libc",
"winapi",
]
[[package]]
@@ -1136,17 +1099,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "html5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4"
dependencies = [
"log",
"markup5ever",
"match_token",
]
[[package]]
name = "http"
version = "1.3.1"
@@ -1550,43 +1502,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "mac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "markdown"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5cab8f2cadc416a82d2e783a1946388b31654d391d1c7d92cc1f03e295b1deb"
dependencies = [
"unicode-id",
]
[[package]]
name = "markup5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3"
dependencies = [
"log",
"tendril",
"web_atoms",
]
[[package]]
name = "match_token"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "matchers"
version = "0.1.0"
@@ -1660,12 +1575,6 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "new_debug_unreachable"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]]
name = "nom"
version = "7.1.3"
@@ -1900,58 +1809,6 @@ dependencies = [
"sha2",
]
[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_macros",
"phf_shared",
]
[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared",
]
[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
"rand 0.8.5",
]
[[package]]
name = "phf_macros"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
@@ -2015,12 +1872,6 @@ dependencies = [
"zerocopy",
]
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]]
name = "proc-macro-error-attr2"
version = "2.0.0"
@@ -2092,9 +1943,9 @@ dependencies = [
[[package]]
name = "quinn"
version = "0.11.9"
version = "0.11.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
dependencies = [
"bytes",
"cfg_aliases",
@@ -2103,7 +1954,7 @@ dependencies = [
"quinn-udp",
"rustc-hash",
"rustls",
"socket2 0.6.0",
"socket2 0.5.10",
"thiserror",
"tokio",
"tracing",
@@ -2112,9 +1963,9 @@ dependencies = [
[[package]]
name = "quinn-proto"
version = "0.11.13"
version = "0.11.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
dependencies = [
"bytes",
"getrandom 0.3.3",
@@ -2133,14 +1984,14 @@ dependencies = [
[[package]]
name = "quinn-udp"
version = "0.5.14"
version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970"
dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2 0.6.0",
"socket2 0.5.10",
"tracing",
"windows-sys 0.59.0",
]
@@ -2431,6 +2282,15 @@ dependencies = [
"zeroize",
]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]]
name = "rustls-pki-types"
version = "1.12.0"
@@ -2470,21 +2330,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scraper"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5f3a24d916e78954af99281a455168d4a9515d65eca99a18da1b813689c4ad9"
dependencies = [
"cssparser",
"ego-tree",
"getopts",
"html5ever",
"precomputed-hash",
"selectors",
"tendril",
]
[[package]]
name = "secrecy"
version = "0.10.3"
@@ -2495,25 +2340,6 @@ dependencies = [
"zeroize",
]
[[package]]
name = "selectors"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5685b6ae43bfcf7d2e7dfcfb5d8e8f61b46442c902531e41a32a9a8bf0ee0fb6"
dependencies = [
"bitflags",
"cssparser",
"derive_more",
"fxhash",
"log",
"new_debug_unreachable",
"phf",
"phf_codegen",
"precomputed-hash",
"servo_arc",
"smallvec",
]
[[package]]
name = "semver"
version = "1.0.26"
@@ -2616,15 +2442,6 @@ dependencies = [
"serde",
]
[[package]]
name = "servo_arc"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "204ea332803bd95a0b60388590d59cf6468ec9becf626e2451f1d26a1d972de4"
dependencies = [
"stable_deref_trait",
]
[[package]]
name = "sha1"
version = "0.10.6"
@@ -2672,12 +2489,6 @@ dependencies = [
"rand_core 0.6.4",
]
[[package]]
name = "siphasher"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]]
name = "slab"
version = "0.4.11"
@@ -2936,31 +2747,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "string_cache"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f"
dependencies = [
"new_debug_unreachable",
"parking_lot",
"phf_shared",
"precomputed-hash",
"serde",
]
[[package]]
name = "string_cache_codegen"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
]
[[package]]
name = "stringprep"
version = "0.1.5"
@@ -3015,17 +2801,6 @@ dependencies = [
"syn",
]
[[package]]
name = "tendril"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
dependencies = [
"futf",
"mac",
"utf-8",
]
[[package]]
name = "thiserror"
version = "2.0.16"
@@ -3151,9 +2926,9 @@ dependencies = [
[[package]]
name = "tokio-rustls"
version = "0.26.3"
version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd"
checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
dependencies = [
"rustls",
"tokio",
@@ -3374,6 +3149,24 @@ dependencies = [
"syn",
]
[[package]]
name = "tracing-bunyan-formatter"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d637245a0d8774bd48df6482e086c59a8b5348a910c3b0579354045a9d82411"
dependencies = [
"ahash",
"gethostname",
"log",
"serde",
"serde_json",
"time",
"tracing",
"tracing-core",
"tracing-log 0.1.4",
"tracing-subscriber",
]
[[package]]
name = "tracing-core"
version = "0.1.34"
@@ -3384,6 +3177,17 @@ dependencies = [
"valuable",
]
[[package]]
name = "tracing-log"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]]
name = "tracing-log"
version = "0.2.0"
@@ -3410,7 +3214,7 @@ dependencies = [
"thread_local",
"tracing",
"tracing-core",
"tracing-log",
"tracing-log 0.2.0",
]
[[package]]
@@ -3455,12 +3259,6 @@ version = "0.3.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5c1cb5db39152898a79168971543b1cb5020dff7fe43c8dc468b0885f5e29df5"
[[package]]
name = "unicode-id"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ba288e709927c043cbe476718d37be306be53fb1fafecd0dbe36d072be2580"
[[package]]
name = "unicode-ident"
version = "1.0.18"
@@ -3488,12 +3286,6 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
[[package]]
name = "untrusted"
version = "0.9.0"
@@ -3517,12 +3309,6 @@ version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]]
name = "utf8_iter"
version = "1.0.4"
@@ -3710,18 +3496,6 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "web_atoms"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414"
dependencies = [
"phf",
"phf_codegen",
"string_cache",
"string_cache_codegen",
]
[[package]]
name = "webpki-roots"
version = "0.26.11"
@@ -4070,22 +3844,23 @@ dependencies = [
"argon2",
"askama",
"axum",
"axum-server",
"base64 0.22.1",
"chrono",
"claims",
"config",
"fake",
"linkify",
"markdown",
"once_cell",
"quickcheck",
"quickcheck_macros",
"rand 0.9.2",
"reqwest",
"scraper",
"secrecy",
"serde",
"serde-aux",
"serde_json",
"serde_urlencoded",
"sqlx",
"thiserror",
"tokio",
@@ -4093,7 +3868,10 @@ dependencies = [
"tower-sessions",
"tower-sessions-redis-store",
"tracing",
"tracing-bunyan-formatter",
"tracing-subscriber",
"unicode-segmentation",
"urlencoding",
"uuid",
"validator",
"wiremock",

View File

@@ -11,31 +11,20 @@ path = "src/lib.rs"
path = "src/main.rs"
name = "zero2prod"
[profile.release]
opt-level = 'z'
lto = true
codegen-units = 1
panic = 'abort'
strip = true
rpath = false
debug = false
debug-assertions = false
overflow-checks = false
incremental = false
[dependencies]
anyhow = "1.0.99"
argon2 = { version = "0.5.3", features = ["std"] }
askama = "0.14.0"
axum = { version = "0.8.4", features = ["macros"] }
axum-server = { version = "0.7.2", features = ["tls-rustls-no-provider"] }
base64 = "0.22.1"
chrono = { version = "0.4.41", default-features = false, features = ["clock"] }
config = "0.15.14"
markdown = "1.0.0"
rand = { version = "0.9.2", features = ["std_rng"] }
reqwest = { version = "0.12.23", default-features = false, features = [
"cookies",
"json",
"rustls-tls",
"json",
"cookies",
] }
secrecy = { version = "0.10.3", features = ["serde"] }
serde = { version = "1.0.219", features = ["derive"] }
@@ -54,7 +43,10 @@ tower-http = { version = "0.6.6", features = ["fs", "trace"] }
tower-sessions = "0.14.0"
tower-sessions-redis-store = "0.16.0"
tracing = "0.1.41"
tracing-bunyan-formatter = "0.3.10"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
unicode-segmentation = "1.12.0"
urlencoding = "2.1.3"
uuid = { version = "1.18.0", features = ["v4", "serde"] }
validator = { version = "0.20.0", features = ["derive"] }
@@ -65,6 +57,6 @@ linkify = "0.10.0"
once_cell = "1.21.3"
quickcheck = "1.0.3"
quickcheck_macros = "1.1.0"
scraper = "0.24.0"
serde_json = "1.0.143"
serde_urlencoded = "0.7.1"
wiremock = "0.6.4"

View File

@@ -1,6 +1,5 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.90.0 AS chef
FROM lukemathwalker/cargo-chef:latest-rust-1.89.0 AS chef
WORKDIR /app
RUN apt update && apt install -y nodejs npm clang mold && rm -rf /var/lib/apt/lists/*
FROM chef AS planner
COPY . .
@@ -8,17 +7,21 @@ RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json
RUN apt update -y \
&& apt install -y --no-install-recommends clang mold
RUN cargo chef cook --release --recipe-path recipe.json
COPY . .
ENV SQLX_OFFLINE=true
ENV RUSTFLAGS="-C strip=symbols"
RUN cargo build --release --bin zero2prod
RUN npm install && npm run build-css
FROM gcr.io/distroless/cc-debian12 AS runtime
FROM debian:bookworm-slim AS runtime
WORKDIR /app
RUN apt update -y \
&& apt install -y --no-install-recommends openssl ca-certificates \
&& apt autoremove -y \
&& apt clean -y \
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/zero2prod zero2prod
COPY --from=builder /app/assets assets
COPY --from=builder /app/configuration configuration
COPY configuration configuration
ENV APP_ENVIRONMENT=production
ENTRYPOINT [ "./zero2prod" ]

File diff suppressed because one or more lines are too long

Binary file not shown (before: 874 B image)

View File

@@ -2,16 +2,14 @@ application:
port: 8080
host: "127.0.0.1"
base_url: "http://127.0.0.1:8080"
email_client:
authorization_token: "secret-token"
database:
host: "127.0.0.1"
port: 5432
database_name: "newsletter"
username: "postgres"
password: "password"
password: "Jq09NF6Y8ZXJS4jd9c8U"
require_ssl: false
timeout_milliseconds: 1000
kv_store:
host: "127.0.0.1"
port: 6379
email_client:
authorization_token: "secret-token"
redis_uri: "redis://127.0.0.1:6379"
require_tls: false

View File

@@ -1,4 +1,2 @@
application:
host: "0.0.0.0"
database:
timeout_milliseconds: 500

View File

@@ -1,5 +0,0 @@
ALTER TABLE subscriptions ADD COLUMN unsubscribe_token TEXT UNIQUE;
UPDATE subscriptions
SET unsubscribe_token = left(md5(random()::text), 25)
WHERE status = 'confirmed' AND unsubscribe_token IS NULL;

View File

@@ -1,8 +0,0 @@
ALTER TABLE subscription_tokens
DROP CONSTRAINT subscription_tokens_subscriber_id_fkey;
ALTER TABLE subscription_tokens
ADD CONSTRAINT subscription_tokens_subscriber_id_fkey
FOREIGN KEY (subscriber_id)
REFERENCES subscriptions (id)
ON DELETE CASCADE;

View File

@@ -1 +0,0 @@
ALTER TABLE issue_delivery_queue ADD COLUMN unsubscribe_token TEXT NOT NULL;

View File

@@ -1 +0,0 @@
ALTER TABLE issue_delivery_queue ADD COLUMN kind TEXT NOT NULL;

View File

@@ -1,7 +0,0 @@
CREATE TABLE notifications_delivered (
email_id UUID PRIMARY KEY,
newsletter_issue_id UUID NOT NULL
REFERENCES newsletter_issues (newsletter_issue_id),
delivered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
opened BOOLEAN NOT NULL DEFAULT FALSE
);

View File

@@ -1,7 +0,0 @@
CREATE TYPE user_role AS ENUM ('admin', 'writer');
ALTER TABLE users ADD COLUMN role user_role;
UPDATE users SET role = 'admin' WHERE role IS NULL;
ALTER TABLE users ALTER COLUMN role SET NOT NULL;

View File

@@ -1,4 +0,0 @@
ALTER TABLE users
ADD COLUMN full_name TEXT,
ADD COLUMN bio TEXT,
ADD COLUMN member_since TIMESTAMPTZ NOT NULL DEFAULT NOW();

View File

@@ -1,7 +0,0 @@
CREATE TABLE comments (
comment_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
post_id UUID NOT NULL REFERENCES posts (post_id) ON DELETE CASCADE,
author TEXT,
content TEXT NOT NULL,
published_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

View File

@@ -1,11 +0,0 @@
ALTER TABLE idempotency
DROP CONSTRAINT idempotency_user_id_fkey;
ALTER TABLE idempotency
DROP CONSTRAINT idempotency_pkey;
ALTER TABLE idempotency
ADD PRIMARY KEY (idempotency_key);
ALTER TABLE idempotency
DROP COLUMN user_id;

View File

@@ -1,3 +0,0 @@
ALTER TABLE comments
ADD COLUMN user_id UUID
REFERENCES users (user_id) ON DELETE SET NULL;

View File

@@ -1,2 +0,0 @@
ALTER TABLE posts
ADD COLUMN last_modified TIMESTAMPTZ;

View File

@@ -1,5 +0,0 @@
CREATE TABLE user_logins (
id BIGSERIAL PRIMARY KEY,
user_id UUID NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
login_time TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

package-lock.json generated
View File

@@ -7,9 +7,6 @@
"dependencies": {
"@tailwindcss/cli": "^4.1.13",
"tailwindcss": "^4.1.13"
},
"devDependencies": {
"@tailwindcss/typography": "^0.5.18"
}
},
"node_modules/@isaacs/fs-minipass": {
@@ -639,19 +636,6 @@
"node": ">=8"
}
},
"node_modules/@tailwindcss/typography": {
"version": "0.5.18",
"resolved": "https://registry.npmjs.org/@tailwindcss/typography/-/typography-0.5.18.tgz",
"integrity": "sha512-dDIgwZOlf+tVkZ7A029VvQ1+ngKATENDjMEx2N35s2yPjfTS05RWSM8ilhEWSa5DMJ6ci2Ha9WNZEd2GQjrdQg==",
"dev": true,
"license": "MIT",
"dependencies": {
"postcss-selector-parser": "6.0.10"
},
"peerDependencies": {
"tailwindcss": ">=3.0.0 || insiders || >=4.0.0-alpha.20 || >=4.0.0-beta.1"
}
},
"node_modules/braces": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz",
@@ -673,19 +657,6 @@
"node": ">=18"
}
},
"node_modules/cssesc": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
"integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==",
"dev": true,
"license": "MIT",
"bin": {
"cssesc": "bin/cssesc"
},
"engines": {
"node": ">=4"
}
},
"node_modules/detect-libc": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz",
@@ -1096,20 +1067,6 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/postcss-selector-parser": {
"version": "6.0.10",
"resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.0.10.tgz",
"integrity": "sha512-IQ7TZdoaqbT+LCpShg46jnZVlhWD2w6iQYAcYXfHARZ7X1t/UGhhceQDs5X0cGqKvYlHNOuv7Oa1xmb0oQuA3w==",
"dev": true,
"license": "MIT",
"dependencies": {
"cssesc": "^3.0.0",
"util-deprecate": "^1.0.2"
},
"engines": {
"node": ">=4"
}
},
"node_modules/source-map-js": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz",
@@ -1123,8 +1080,7 @@
"version": "4.1.13",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.13.tgz",
"integrity": "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w==",
"license": "MIT",
"peer": true
"license": "MIT"
},
"node_modules/tapable": {
"version": "2.2.3",
@@ -1168,13 +1124,6 @@
"node": ">=8.0"
}
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==",
"dev": true,
"license": "MIT"
},
"node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",

View File

@@ -1,13 +1,9 @@
{
"scripts": {
"build-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --minify",
"watch-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --watch"
"build-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --minify --watch"
},
"dependencies": {
"@tailwindcss/cli": "^4.1.13",
"tailwindcss": "^4.1.13"
},
"devDependencies": {
"@tailwindcss/typography": "^0.5.18"
}
}

View File

@@ -14,7 +14,7 @@ if ! [ -x "$(command -v sqlx)" ]; then
fi
DB_USER="${POSTGRES_USER:=postgres}"
DB_PASSWORD="${POSTGRES_PASSWORD:=password}"
DB_PASSWORD="${POSTGRES_PASSWORD:=Jq09NF6Y8ZXJS4jd9c8U}"
DB_NAME="${POSTGRES_DB:=newsletter}"
DB_PORT="${POSTGRES_PORT:=5432}"
DB_HOST="${POSTGRES_HOST:=localhost}"

View File

@@ -1,12 +1,14 @@
use crate::telemetry::spawn_blocking_with_tracing;
use crate::{
routes::AdminError, session_state::TypedSession, telemetry::spawn_blocking_with_tracing,
};
use anyhow::Context;
use argon2::{
Algorithm, Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier, Version,
password_hash::{SaltString, rand_core::OsRng},
};
use axum::{extract::Request, middleware::Next, response::Response};
use secrecy::{ExposeSecret, SecretString};
use sqlx::PgPool;
use std::fmt::Display;
use uuid::Uuid;
pub struct Credentials {
@@ -42,7 +44,7 @@ pub async fn change_password(
Ok(())
}
pub(crate) fn compute_pasword_hash(password: SecretString) -> Result<SecretString, anyhow::Error> {
fn compute_pasword_hash(password: SecretString) -> Result<SecretString, anyhow::Error> {
let salt = SaltString::generate(&mut OsRng);
let password_hash = Argon2::new(
Algorithm::Argon2id,
@@ -54,13 +56,15 @@ pub(crate) fn compute_pasword_hash(password: SecretString) -> Result<SecretStrin
Ok(SecretString::from(password_hash))
}
#[tracing::instrument(name = "Validate credentials", skip_all)]
#[tracing::instrument(
name = "Validate credentials",
skip(username, password, connection_pool)
)]
pub async fn validate_credentials(
Credentials { username, password }: Credentials,
connection_pool: &PgPool,
) -> Result<(Uuid, Role), AuthError> {
) -> Result<Uuid, AuthError> {
let mut user_id = None;
let mut role = None;
let mut expected_password_hash = SecretString::from(
"$argon2id$v=19$m=15000,t=2,p=1$\
gZiV/M1gPc22ElAH/Jh1Hw$\
@@ -68,14 +72,13 @@ CWOrkoo7oJBQ/iyh7uJ0LO2aLEfrHwTWllSAxT0zRno"
.to_string(),
);
if let Some((stored_user_id, stored_expected_password_hash, stored_role)) =
if let Some((stored_user_id, stored_expected_password_hash)) =
get_stored_credentials(&username, connection_pool)
.await
.context("Failed to retrieve credentials from database.")
.map_err(AuthError::UnexpectedError)?
{
user_id = Some(stored_user_id);
role = Some(stored_role);
expected_password_hash = stored_expected_password_hash;
}
@@ -86,19 +89,18 @@ CWOrkoo7oJBQ/iyh7uJ0LO2aLEfrHwTWllSAxT0zRno"
.ok_or_else(|| anyhow::anyhow!("Unknown username."))
.map_err(AuthError::InvalidCredentials)?;
let role = role
.ok_or_else(|| anyhow::anyhow!("Unknown role."))
.map_err(AuthError::UnexpectedError)?;
handle
.await
.context("Failed to spawn blocking task.")
.map_err(AuthError::UnexpectedError)?
.map_err(AuthError::InvalidCredentials)
.map(|_| (uuid, role))
.map(|_| uuid)
}
#[tracing::instrument(name = "Verify password", skip_all)]
#[tracing::instrument(
name = "Verify password",
skip(expected_password_hash, password_candidate)
)]
fn verify_password_hash(
expected_password_hash: SecretString,
password_candidate: SecretString,
@@ -113,14 +115,14 @@ fn verify_password_hash(
.context("Password verification failed.")
}
#[tracing::instrument(name = "Get stored credentials", skip(connection_pool))]
#[tracing::instrument(name = "Get stored credentials", skip(username, connection_pool))]
async fn get_stored_credentials(
username: &str,
connection_pool: &PgPool,
) -> Result<Option<(Uuid, SecretString, Role)>, sqlx::Error> {
) -> Result<Option<(Uuid, SecretString)>, sqlx::Error> {
let row = sqlx::query!(
r#"
SELECT user_id, password_hash, role as "role: Role"
SELECT user_id, password_hash
FROM users
WHERE username = $1
"#,
@@ -128,35 +130,37 @@ async fn get_stored_credentials(
)
.fetch_optional(connection_pool)
.await?
.map(|row| (row.user_id, SecretString::from(row.password_hash), row.role));
.map(|row| (row.user_id, SecretString::from(row.password_hash)));
Ok(row)
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Copy, PartialEq, Eq, sqlx::Type)]
#[sqlx(type_name = "user_role", rename_all = "lowercase")]
pub enum Role {
Admin,
Writer,
}
pub async fn require_auth(
session: TypedSession,
mut request: Request,
next: Next,
) -> Result<Response, AdminError> {
let user_id = session
.get_user_id()
.await
.map_err(|e| AdminError::UnexpectedError(e.into()))?
.ok_or(AdminError::NotAuthenticated)?;
let username = session
.get_username()
.await
.map_err(|e| AdminError::UnexpectedError(e.into()))?
.ok_or(AdminError::UnexpectedError(anyhow::anyhow!(
"Could not find username in session."
)))?;
impl Display for Role {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Role::Admin => write!(f, "admin"),
Role::Writer => write!(f, "writer"),
}
}
request
.extensions_mut()
.insert(AuthenticatedUser { user_id, username });
Ok(next.run(request).await)
}
#[derive(Clone)]
pub struct AuthenticatedUser {
pub user_id: Uuid,
pub username: String,
pub role: Role,
}
impl AuthenticatedUser {
pub fn is_admin(&self) -> bool {
matches!(self.role, Role::Admin)
}
}

View File

@@ -1,13 +1,8 @@
use crate::domain::SubscriberEmail;
use anyhow::Context;
use secrecy::{ExposeSecret, SecretString};
use serde::Deserialize;
use serde_aux::field_attributes::deserialize_number_from_string;
use sqlx::postgres::{PgConnectOptions, PgSslMode};
use tower_sessions_redis_store::{
RedisStore,
fred::prelude::{ClientLike, Pool},
};
pub fn get_configuration() -> Result<Settings, config::ConfigError> {
let base_path = std::env::current_dir().expect("Failed to determine the current directory");
@@ -65,7 +60,8 @@ pub struct Settings {
pub application: ApplicationSettings,
pub database: DatabaseSettings,
pub email_client: EmailClientSettings,
pub kv_store: RedisSettings,
pub redis_uri: SecretString,
pub require_tls: bool,
}
#[derive(Clone, Deserialize)]
@@ -105,35 +101,6 @@ impl EmailClientSettings {
}
}
#[derive(Clone, Deserialize)]
pub struct RedisSettings {
pub host: String,
pub port: u16,
}
impl RedisSettings {
pub fn connection_string(&self) -> String {
format!("redis://{}:{}", self.host, self.port)
}
pub async fn session_store(&self) -> Result<RedisStore<Pool>, anyhow::Error> {
let pool = Pool::new(
tower_sessions_redis_store::fred::prelude::Config::from_url(&self.connection_string())
.context("Failed to parse Redis URL string.")?,
None,
None,
None,
6,
)
.unwrap();
pool.connect();
pool.wait_for_connect()
.await
.context("Failed to connect to the Redis server.")?;
Ok(RedisStore::new(pool))
}
}
#[derive(Clone, Deserialize)]
pub struct DatabaseSettings {
pub username: String,
@@ -143,7 +110,6 @@ pub struct DatabaseSettings {
pub host: String,
pub database_name: String,
pub require_ssl: bool,
pub timeout_milliseconds: u64,
}
impl DatabaseSettings {

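A tiny sanity check, written as a Rust sketch rather than part of the diff, that the format string in RedisSettings::connection_string yields the same URI as the flat redis_uri value on the other side of this hunk (host and port taken from the local settings shown earlier):

fn connection_string(host: &str, port: u16) -> String {
    // Same format string as RedisSettings::connection_string above.
    format!("redis://{}:{}", host, port)
}

fn main() {
    // kv_store host/port from the local settings map to the old redis_uri value.
    assert_eq!(connection_string("127.0.0.1", 6379), "redis://127.0.0.1:6379");
}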
View File

@@ -1,58 +0,0 @@
use anyhow::Context;
use sqlx::{
PgPool,
postgres::{PgConnectOptions, PgPoolOptions},
};
use std::time::Duration;
pub async fn run_until_stopped(configuration: PgConnectOptions) -> Result<(), anyhow::Error> {
let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration);
worker_loop(connection_pool).await
}
async fn worker_loop(connection_pool: PgPool) -> Result<(), anyhow::Error> {
loop {
if let Err(e) = clean_pending_subscriptions(&connection_pool).await {
tracing::error!("{:?}", e);
}
if let Err(e) = clean_idempotency_keys(&connection_pool).await {
tracing::error!("{:?}", e);
}
tokio::time::sleep(Duration::from_secs(60)).await;
}
}
async fn clean_pending_subscriptions(connection_pool: &PgPool) -> Result<(), anyhow::Error> {
let result = sqlx::query!(
"
DELETE FROM subscriptions
WHERE status = 'pending_confirmation'
AND subscribed_at < NOW() - INTERVAL '24 hours'
"
)
.execute(connection_pool)
.await
.context("Failed to clean up subscriptions table.")?;
match result.rows_affected() {
n if n > 0 => tracing::info!("Cleaned up {} expired subscriptions.", n),
_ => (),
}
Ok(())
}
async fn clean_idempotency_keys(connection_pool: &PgPool) -> Result<(), anyhow::Error> {
let result = sqlx::query!(
"
DELETE FROM idempotency
WHERE created_at < NOW() - INTERVAL '1 hour'
"
)
.execute(connection_pool)
.await
.context("Failed to clean up idempotency table.")?;
match result.rows_affected() {
n if n > 0 => tracing::info!("Cleaned up {} old idempotency records.", n),
_ => (),
}
Ok(())
}

View File

@@ -1,13 +1,7 @@
mod comment;
mod new_subscriber;
mod post;
mod subscriber_email;
mod subscribers;
mod user;
pub use comment::CommentEntry;
pub use new_subscriber::NewSubscriber;
pub use post::PostEntry;
pub use subscriber_email::SubscriberEmail;
pub use subscribers::SubscriberEntry;
pub use user::UserEntry;

View File

@@ -1,18 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct CommentEntry {
pub user_id: Option<Uuid>,
pub username: Option<String>,
pub comment_id: Uuid,
pub post_id: Uuid,
pub author: Option<String>,
pub content: String,
pub published_at: DateTime<Utc>,
}
impl CommentEntry {
pub fn formatted_date(&self) -> String {
self.published_at.format("%B %d, %Y %H:%M").to_string()
}
}

View File

@@ -3,24 +3,15 @@ use uuid::Uuid;
pub struct PostEntry {
pub post_id: Uuid,
pub author_id: Uuid,
pub author: String,
pub full_name: Option<String>,
pub author: Option<String>,
pub title: String,
pub content: String,
pub published_at: DateTime<Utc>,
pub last_modified: Option<DateTime<Utc>>,
}
impl PostEntry {
#[allow(dead_code)]
pub fn formatted_date(&self) -> String {
self.published_at.format("%B %d, %Y %H:%M").to_string()
}
pub fn to_html(&self) -> anyhow::Result<String> {
match markdown::to_html_with_options(&self.content, &markdown::Options::gfm()) {
Ok(content) => Ok(content),
Err(e) => anyhow::bail!(e),
}
self.published_at.format("%B %d, %Y").to_string()
}
}

View File

@@ -1,5 +1,3 @@
use std::fmt::Display;
use validator::Validate;
#[derive(Debug, Validate)]
@@ -24,12 +22,6 @@ impl AsRef<str> for SubscriberEmail {
}
}
impl Display for SubscriberEmail {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.email)
}
}
#[cfg(test)]
mod tests {
use super::SubscriberEmail;

View File

@@ -1,21 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct SubscriberEntry {
pub id: Uuid,
pub email: String,
pub subscribed_at: DateTime<Utc>,
pub status: String,
pub unsubscribe_token: Option<String>,
}
impl SubscriberEntry {
pub fn confirmed(&self) -> bool {
self.status == "confirmed"
}
#[allow(dead_code)]
pub fn formatted_date(&self) -> String {
self.subscribed_at.format("%B %d, %Y").to_string()
}
}

View File

@@ -1,22 +0,0 @@
use crate::authentication::Role;
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct UserEntry {
pub user_id: Uuid,
pub username: String,
pub role: Role,
pub full_name: Option<String>,
pub bio: Option<String>,
pub member_since: DateTime<Utc>,
}
impl UserEntry {
pub fn formatted_date(&self) -> String {
self.member_since.format("%B %d, %Y").to_string()
}
pub fn is_admin(&self) -> bool {
matches!(self.role, Role::Admin)
}
}

View File

@@ -33,8 +33,12 @@ impl EmailClient {
) -> Result<(), reqwest::Error> {
let url = self.base_url.join("email").unwrap();
let request_body = SendEmailRequest {
from: self.sender.as_ref(),
to: recipient.as_ref(),
from: EmailField {
email: self.sender.as_ref(),
},
to: vec![EmailField {
email: recipient.as_ref(),
}],
subject,
text: text_content,
html: html_content,
@@ -57,13 +61,18 @@ impl EmailClient {
#[derive(serde::Serialize)]
struct SendEmailRequest<'a> {
from: &'a str,
to: &'a str,
from: EmailField<'a>,
to: Vec<EmailField<'a>>,
subject: &'a str,
text: &'a str,
html: &'a str,
}
#[derive(serde::Serialize)]
struct EmailField<'a> {
email: &'a str,
}
#[cfg(test)]
mod tests {
use crate::{
@@ -119,7 +128,7 @@ mod tests {
EmailClient::build(settings).unwrap()
}
#[sqlx::test]
#[tokio::test]
async fn send_email_sends_the_expected_request() {
let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri());
@@ -141,7 +150,7 @@ mod tests {
.unwrap();
}
#[sqlx::test]
#[tokio::test]
async fn send_email_succeeds_if_the_server_returns_200() {
let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri());
@@ -159,7 +168,7 @@ mod tests {
assert_ok!(response);
}
#[sqlx::test]
#[tokio::test]
async fn send_email_fails_if_the_server_retuns_500() {
let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri());
@@ -177,7 +186,7 @@ mod tests {
assert_err!(response);
}
#[sqlx::test]
#[tokio::test]
async fn send_email_times_out_if_the_server_takes_too_long() {
let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri());

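As a side note on the nested request shape above, here is a minimal, self-contained sketch (not part of the diff; the addresses and contents are invented) of the JSON body that the EmailField-based SendEmailRequest serializes to with serde_json:

use serde::Serialize;

// Mirrors the private structs shown in the hunk above.
#[derive(Serialize)]
struct EmailField<'a> {
    email: &'a str,
}

#[derive(Serialize)]
struct SendEmailRequest<'a> {
    from: EmailField<'a>,
    to: Vec<EmailField<'a>>,
    subject: &'a str,
    text: &'a str,
    html: &'a str,
}

fn main() {
    let body = SendEmailRequest {
        from: EmailField { email: "sender@example.com" },
        to: vec![EmailField { email: "subscriber@example.com" }],
        subject: "New post",
        text: "plain-text content",
        html: "<p>html content</p>",
    };
    // Prints {"from":{"email":"sender@example.com"},"to":[{"email":"subscriber@example.com"}],
    // "subject":"New post","text":"plain-text content","html":"<p>html content</p>"}
    println!("{}", serde_json::to_string(&body).unwrap());
}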
View File

@@ -1,4 +1,3 @@
#[derive(Debug)]
pub struct IdempotencyKey(String);
impl TryFrom<String> for IdempotencyKey {

View File

@@ -7,6 +7,7 @@ use axum::{
use reqwest::StatusCode;
use sqlx::{Executor, PgPool, Postgres, Transaction};
use std::str::FromStr;
use uuid::Uuid;
#[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "header_pair")]
@@ -15,13 +16,10 @@ struct HeaderPairRecord {
value: Vec<u8>,
}
#[tracing::instrument(
name = "Fetching saved response in database if it exists",
skip(connection_pool)
)]
pub async fn get_saved_response(
connection_pool: &PgPool,
idempotency_key: &IdempotencyKey,
user_id: Uuid,
) -> Result<Option<Response>, anyhow::Error> {
let saved_response = sqlx::query!(
r#"
@@ -30,8 +28,11 @@ pub async fn get_saved_response(
response_headers as "response_headers!: Vec<HeaderPairRecord>",
response_body as "response_body!"
FROM idempotency
WHERE idempotency_key = $1
WHERE
user_id = $1
AND idempotency_key = $2
"#,
user_id,
idempotency_key.as_ref()
)
.fetch_optional(connection_pool)
@@ -52,10 +53,10 @@ pub async fn get_saved_response(
}
}
#[tracing::instrument(name = "Saving response in database", skip(transaction, response))]
pub async fn save_response(
mut transaction: Transaction<'static, Postgres>,
idempotency_key: &IdempotencyKey,
user_id: Uuid,
response: Response<Body>,
) -> Result<Response<Body>, anyhow::Error> {
let status_code = response.status().as_u16() as i16;
@@ -74,11 +75,14 @@ pub async fn save_response(
r#"
UPDATE idempotency
SET
response_status_code = $2,
response_headers = $3,
response_body = $4
WHERE idempotency_key = $1
response_status_code = $3,
response_headers = $4,
response_body = $5
WHERE
user_id = $1
AND idempotency_key = $2
"#,
user_id,
idempotency_key.as_ref(),
status_code,
headers,
@@ -100,21 +104,23 @@ pub enum NextAction {
pub async fn try_processing(
connection_pool: &PgPool,
idempotency_key: &IdempotencyKey,
user_id: Uuid,
) -> Result<NextAction, anyhow::Error> {
let mut transaction = connection_pool.begin().await?;
let query = sqlx::query!(
r#"
INSERT INTO idempotency (idempotency_key, created_at)
VALUES ($1, now())
INSERT INTO idempotency (user_id, idempotency_key, created_at)
VALUES ($1, $2, now())
ON CONFLICT DO NOTHING
"#,
user_id,
idempotency_key.as_ref()
);
let n_inserted_rows = transaction.execute(query).await?.rows_affected();
if n_inserted_rows > 0 {
Ok(NextAction::StartProcessing(transaction))
} else {
let saved_response = get_saved_response(connection_pool, idempotency_key)
let saved_response = get_saved_response(connection_pool, idempotency_key, user_id)
.await?
.ok_or_else(|| anyhow::anyhow!("Could not find saved response."))?;
Ok(NextAction::ReturnSavedResponse(saved_response))

View File

@@ -1,13 +1,10 @@
use crate::{
configuration::Settings, domain::SubscriberEmail, email_client::EmailClient, routes::EmailType,
};
use anyhow::Context;
use crate::{configuration::Settings, domain::SubscriberEmail, email_client::EmailClient};
use sqlx::{Executor, PgPool, Postgres, Row, Transaction, postgres::PgPoolOptions};
use std::time::Duration;
use tracing::{Span, field::display};
use uuid::Uuid;
pub async fn run_until_stopped(configuration: Settings) -> Result<(), anyhow::Error> {
pub async fn run_worker_until_stopped(configuration: Settings) -> Result<(), anyhow::Error> {
let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db());
let email_client = EmailClient::build(configuration.email_client).unwrap();
worker_loop(connection_pool, email_client).await
@@ -31,82 +28,63 @@ pub enum ExecutionOutcome {
EmptyQueue,
}
#[tracing::instrument(
skip_all,
fields(
newsletter_issue_id=tracing::field::Empty,
subscriber_email=tracing::field::Empty
),
err
)]
pub async fn try_execute_task(
connection_pool: &PgPool,
email_client: &EmailClient,
) -> Result<ExecutionOutcome, anyhow::Error> {
let task = dequeue_task(connection_pool).await?;
let (mut transaction, task) = match task {
Some((transaction, task)) => (transaction, task),
None => return Ok(ExecutionOutcome::EmptyQueue),
};
if task.is_none() {
return Ok(ExecutionOutcome::EmptyQueue);
}
let (transaction, issue_id, email) = task.unwrap();
Span::current()
.record("newsletter_issue_id", display(task.newsletter_issue_id))
.record("subscriber_email", display(&task.subscriber_email));
match SubscriberEmail::parse(task.subscriber_email.clone()) {
.record("newsletter_issue_id", display(issue_id))
.record("subscriber_email", display(&email));
match SubscriberEmail::parse(email.clone()) {
Ok(email) => {
execute_task(
connection_pool,
&mut transaction,
&task,
email,
email_client,
let issue = get_issue(connection_pool, issue_id).await?;
if let Err(e) = email_client
.send_email(
&email,
&issue.title,
&issue.html_content,
&issue.text_content,
)
.await?;
.await
{
tracing::error!(
error.message = %e,
"Failed to deliver issue to confirmed subscriber. Skipping."
);
}
}
Err(e) => {
tracing::error!(
error = %e,
error.message = %e,
"Skipping a subscriber. Their stored contact details are invalid."
);
}
}
delete_task(
transaction,
task.newsletter_issue_id,
&task.subscriber_email,
)
.await?;
delete_task(transaction, issue_id, &email).await?;
Ok(ExecutionOutcome::TaskCompleted)
}
struct NewsletterIssue {
newsletter_issue_id: Uuid,
title: String,
text_content: String,
html_content: String,
}
impl NewsletterIssue {
fn inject_unsubscribe_token(&mut self, token: &str) {
self.text_content = self.text_content.replace("UNSUBSCRIBE_TOKEN", token);
self.html_content = self.html_content.replace("UNSUBSCRIBE_TOKEN", token);
}
async fn inject_tracking_info(
&mut self,
transaction: &mut Transaction<'static, Postgres>,
) -> Result<(), anyhow::Error> {
let email_id = Uuid::new_v4();
let query = sqlx::query!(
r#"
INSERT INTO notifications_delivered (email_id, newsletter_issue_id)
VALUES ($1, $2)
"#,
email_id,
self.newsletter_issue_id
);
transaction
.execute(query)
.await
.context("Failed to store email tracking info.")?;
self.text_content = self.text_content.replace("EMAIL_ID", &email_id.to_string());
self.html_content = self.html_content.replace("EMAIL_ID", &email_id.to_string());
Ok(())
}
}
#[tracing::instrument(skip_all)]
async fn get_issue(
connection_pool: &PgPool,
issue_id: Uuid,
@@ -114,7 +92,7 @@ async fn get_issue(
let issue = sqlx::query_as!(
NewsletterIssue,
r#"
SELECT newsletter_issue_id, title, text_content, html_content
SELECT title, text_content, html_content
FROM newsletter_issues
WHERE newsletter_issue_id = $1
"#,
@@ -125,20 +103,14 @@ async fn get_issue(
Ok(issue)
}
pub struct Task {
pub newsletter_issue_id: Uuid,
pub subscriber_email: String,
pub unsubscribe_token: String,
pub kind: String,
}
#[tracing::instrument(skip_all)]
async fn dequeue_task(
connection_pool: &PgPool,
) -> Result<Option<(Transaction<'static, Postgres>, Task)>, anyhow::Error> {
) -> Result<Option<(Transaction<'static, Postgres>, Uuid, String)>, anyhow::Error> {
let mut transaction = connection_pool.begin().await?;
let query = sqlx::query!(
r#"
SELECT newsletter_issue_id, subscriber_email, unsubscribe_token, kind
SELECT newsletter_issue_id, subscriber_email
FROM issue_delivery_queue
FOR UPDATE
SKIP LOCKED
@@ -147,47 +119,17 @@ async fn dequeue_task(
);
let r = transaction.fetch_optional(query).await?;
if let Some(row) = r {
let task = Task {
newsletter_issue_id: row.get("newsletter_issue_id"),
subscriber_email: row.get("subscriber_email"),
unsubscribe_token: row.get("unsubscribe_token"),
kind: row.get("kind"),
};
Ok(Some((transaction, task)))
Ok(Some((
transaction,
row.get("newsletter_issue_id"),
row.get("subscriber_email"),
)))
} else {
Ok(None)
}
}
#[tracing::instrument(
name = "Executing task",
skip_all,
fields(email = %email),
)]
async fn execute_task(
connection_pool: &PgPool,
transaction: &mut Transaction<'static, Postgres>,
task: &Task,
email: SubscriberEmail,
email_client: &EmailClient,
) -> Result<(), anyhow::Error> {
let mut issue = get_issue(connection_pool, task.newsletter_issue_id).await?;
issue.inject_unsubscribe_token(&task.unsubscribe_token);
if task.kind == EmailType::NewPost.to_string() {
issue.inject_tracking_info(transaction).await?;
}
email_client
.send_email(
&email,
&issue.title,
&issue.html_content,
&issue.text_content,
)
.await
.context("Failed to deliver newsletter issue to subscriber.")?;
Ok(())
}
#[tracing::instrument(skip_all)]
async fn delete_task(
mut transaction: Transaction<'static, Postgres>,
issue_id: Uuid,

View File

@@ -1,6 +1,5 @@
pub mod authentication;
pub mod configuration;
pub mod database_worker;
pub mod domain;
pub mod email_client;
pub mod idempotency;

View File

@@ -1,6 +1,6 @@
use zero2prod::{
configuration::get_configuration, database_worker, issue_delivery_worker, startup::Application,
telemetry::init_subscriber,
configuration::get_configuration, issue_delivery_worker::run_worker_until_stopped,
startup::Application, telemetry::init_subscriber,
};
#[tokio::main]
@@ -11,16 +11,11 @@ async fn main() -> Result<(), anyhow::Error> {
let application = Application::build(configuration.clone()).await?;
let application_task = tokio::spawn(application.run_until_stopped());
let database_worker_task = tokio::spawn(database_worker::run_until_stopped(
configuration.database.with_db(),
));
let delivery_worker_task =
tokio::spawn(issue_delivery_worker::run_until_stopped(configuration));
let worker_task = tokio::spawn(run_worker_until_stopped(configuration));
tokio::select! {
_ = application_task => {},
_ = database_worker_task => {},
_ = delivery_worker_task => {},
_ = worker_task => {},
};
Ok(())

View File

@@ -1,61 +1,30 @@
mod admin;
mod comments;
mod health_check;
mod home;
mod login;
mod posts;
mod subscriptions;
mod subscriptions_confirm;
mod unsubscribe;
mod users;
pub use admin::*;
use askama::Template;
use axum::{
extract::FromRequestParts,
http::{HeaderMap, request::Parts},
http::HeaderMap,
response::{Html, IntoResponse, Response},
};
pub use comments::*;
pub use health_check::*;
pub use home::*;
pub use login::*;
pub use posts::*;
use rand::{Rng, distr::Alphanumeric};
use reqwest::StatusCode;
use serde::de::DeserializeOwned;
pub use subscriptions::*;
pub use subscriptions_confirm::*;
pub use unsubscribe::*;
pub use users::*;
use validator::ValidationErrors;
use crate::{
authentication::AuthError,
templates::{ErrorTemplate, HtmlTemplate, MessageTemplate},
templates::{InternalErrorTemplate, MessageTemplate, NotFoundTemplate},
};
pub fn generate_token() -> String {
let mut rng = rand::rng();
std::iter::repeat_with(|| rng.sample(Alphanumeric))
.map(char::from)
.take(25)
.collect()
}
fn error_chain_fmt(e: &impl std::error::Error, f: &mut std::fmt::Formatter) -> std::fmt::Result {
writeln!(f, "{}", e)?;
let mut current = e.source();
while let Some(cause) = current {
write!(f, "Caused by:\n\t{}", cause)?;
current = cause.source();
if current.is_some() {
writeln!(f)?;
}
}
Ok(())
}
#[derive(thiserror::Error)]
pub enum AppError {
#[error("An unexpected error was encountered.")]
@@ -68,8 +37,6 @@ pub enum AppError {
FormError(#[source] anyhow::Error),
#[error("Authentication is required.")]
NotAuthenticated,
#[error("Handler extractor failed.")]
Extractor(#[source] anyhow::Error),
}
impl From<anyhow::Error> for AppError {
@@ -113,16 +80,19 @@ impl IntoResponse for AppError {
full_page,
} => {
let html = if *full_page {
Html(ErrorTemplate::InternalServer.render().unwrap())
Html(InternalErrorTemplate.render().unwrap())
} else {
let template =
MessageTemplate::error("An internal server error occurred.".into());
let template = MessageTemplate::Error {
message: "An internal server error occurred.".into(),
};
Html(template.render().unwrap())
};
html.into_response()
(StatusCode::INTERNAL_SERVER_ERROR, html).into_response()
}
AppError::FormError(error) => {
let template = MessageTemplate::error(error.to_string());
let template = MessageTemplate::Error {
message: error.to_string(),
};
Html(template.render().unwrap()).into_response()
}
AppError::NotAuthenticated => {
@@ -130,7 +100,6 @@ impl IntoResponse for AppError {
headers.insert("HX-Redirect", "/login".parse().unwrap());
(StatusCode::OK, headers).into_response()
}
AppError::Extractor(_) => not_found_html(),
}
}
}
@@ -162,68 +131,9 @@ impl From<AuthError> for AppError {
}
pub async fn not_found() -> Response {
tracing::error!("Not found.");
not_found_html()
}
pub fn not_found_html() -> Response {
let template = HtmlTemplate(ErrorTemplate::NotFound);
(StatusCode::NOT_FOUND, template).into_response()
}
pub struct Path<T>(T);
impl<T, S> FromRequestParts<S> for Path<T>
where
T: DeserializeOwned + Send,
S: Send + Sync,
{
type Rejection = AppError;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
match axum::extract::Path::<T>::from_request_parts(parts, state).await {
Ok(value) => Ok(Self(value.0)),
Err(rejection) => Err(AppError::Extractor(anyhow::anyhow!(
"Path rejection: {:?}",
rejection
))),
}
}
}
pub struct Query<T>(pub T);
impl<T, S> FromRequestParts<S> for Query<T>
where
T: DeserializeOwned,
S: Send + Sync,
{
type Rejection = AppError;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
match axum::extract::Query::<T>::from_request_parts(parts, state).await {
Ok(value) => Ok(Self(value.0)),
Err(rejection) => Err(AppError::Extractor(anyhow::anyhow!(
"Query rejection: {:?}",
rejection
))),
}
}
}
pub fn join_error_messages(e: ValidationErrors) -> String {
let error_messages: Vec<_> = e
.field_errors()
.iter()
.flat_map(|(field, errors)| {
errors.iter().map(move |error| {
error
.message
.as_ref()
.map(|msg| msg.to_string())
.unwrap_or(format!("Invalid field: {}", field))
})
})
.collect();
error_messages.join("\n")
(
StatusCode::NOT_FOUND,
Html(NotFoundTemplate.render().unwrap()),
)
.into_response()
}

View File

@@ -3,27 +3,18 @@ mod dashboard;
mod logout;
mod newsletters;
mod posts;
mod subscribers;
use crate::{
authentication::AuthenticatedUser,
routes::{AppError, error_chain_fmt},
session_state::TypedSession,
templates::{HtmlTemplate, MessageTemplate},
};
use anyhow::Context;
use axum::response::Redirect;
use axum::{
extract::Request,
middleware::Next,
response::{IntoResponse, Response},
};
use axum::{extract::Request, middleware::Next, response::Response};
pub use change_password::*;
pub use dashboard::*;
pub use logout::*;
pub use newsletters::*;
pub use posts::*;
pub use subscribers::*;
#[derive(thiserror::Error)]
pub enum AdminError {
@@ -32,7 +23,7 @@ pub enum AdminError {
#[error("Trying to access admin dashboard without authentication.")]
NotAuthenticated,
#[error("Updating password failed.")]
ChangePassword(anyhow::Error),
ChangePassword(String),
#[error("Could not publish newsletter.")]
Publish(#[source] anyhow::Error),
#[error("The idempotency key was invalid.")]
@@ -50,17 +41,11 @@ pub async fn require_auth(
mut request: Request,
next: Next,
) -> Result<Response, AppError> {
let user_id = match session
let user_id = session
.get_user_id()
.await
.map_err(|e| AdminError::UnexpectedError(e.into()))?
{
None => {
tracing::error!("Not authenticated. Redirecting to /login.");
return Ok(Redirect::to("/login").into_response());
}
Some(user_id) => user_id,
};
.ok_or(AdminError::NotAuthenticated)?;
let username = session
.get_username()
.await
@@ -68,36 +53,10 @@ pub async fn require_auth(
.ok_or(AdminError::UnexpectedError(anyhow::anyhow!(
"Could not find username in session."
)))?;
let role = session
.get_role()
.await
.context("Error retrieving user role in session.")?
.ok_or(anyhow::anyhow!("Could not find user role in session."))?;
request.extensions_mut().insert(AuthenticatedUser {
user_id,
username,
role,
});
request
.extensions_mut()
.insert(AuthenticatedUser { user_id, username });
Ok(next.run(request).await)
}
pub async fn require_admin(
session: TypedSession,
request: Request,
next: Next,
) -> Result<Response, AppError> {
if session
.has_admin_permissions()
.await
.context("Error retrieving user role in session.")?
{
Ok(next.run(request).await)
} else {
Ok(HtmlTemplate(MessageTemplate::error(
"This action requires administrator privileges.".into(),
))
.into_response())
}
}

View File

@@ -20,9 +20,7 @@ pub struct PasswordFormData {
}
pub async fn change_password(
Extension(AuthenticatedUser {
user_id, username, ..
}): Extension<AuthenticatedUser>,
Extension(AuthenticatedUser { user_id, username }): Extension<AuthenticatedUser>,
State(AppState {
connection_pool, ..
}): State<AppState>,
@@ -33,16 +31,16 @@ pub async fn change_password(
password: form.current_password,
};
if form.new_password.expose_secret() != form.new_password_check.expose_secret() {
Err(AdminError::ChangePassword(anyhow::anyhow!(
"You entered two different passwords - the field values must match."
))
Err(AdminError::ChangePassword(
"You entered two different passwords - the field values must match.".to_string(),
)
.into())
} else if let Err(e) = validate_credentials(credentials, &connection_pool).await {
match e {
AuthError::UnexpectedError(error) => Err(AdminError::UnexpectedError(error).into()),
AuthError::InvalidCredentials(_) => Err(AdminError::ChangePassword(anyhow::anyhow!(
"The current password is incorrect."
))
AuthError::InvalidCredentials(_) => Err(AdminError::ChangePassword(
"The current password is incorrect.".to_string(),
)
.into()),
}
} else if let Err(e) = verify_password(form.new_password.expose_secret()) {
@@ -50,15 +48,17 @@ pub async fn change_password(
} else {
authentication::change_password(user_id, form.new_password, &connection_pool)
.await
.map_err(AdminError::ChangePassword)?;
let template = MessageTemplate::success("Your password has been changed.".to_string());
.map_err(|e| AdminError::ChangePassword(e.to_string()))?;
let template = MessageTemplate::Success {
message: "Your password has been changed.".to_string(),
};
Ok(Html(template.render().unwrap()).into_response())
}
}
pub fn verify_password(password: &str) -> Result<(), anyhow::Error> {
fn verify_password(password: &str) -> Result<(), String> {
if password.len() < 12 || password.len() > 128 {
anyhow::bail!("The password must contain between 12 and 128 characters.");
return Err("The password must contain between 12 and 128 characters.".into());
}
Ok(())
}

View File

@@ -1,133 +1,20 @@
use crate::routes::{
COMMENTS_PER_PAGE, POSTS_PER_PAGE, SUBS_PER_PAGE, get_comments_count, get_comments_page,
get_posts_count, get_posts_page, get_users,
};
use crate::{
authentication::AuthenticatedUser,
routes::{AppError, get_max_page, get_subs, get_total_subs},
startup::AppState,
templates::DashboardTemplate,
};
use anyhow::Context;
use crate::{authentication::AuthenticatedUser, templates::DashboardTemplate};
use askama::Template;
use axum::{
Extension,
extract::State,
response::{Html, IntoResponse, Response},
};
use sqlx::PgPool;
use uuid::Uuid;
pub struct DashboardStats {
pub subscribers: i64,
pub posts: i64,
pub notifications_sent: i64,
pub open_rate: f64,
}
impl DashboardStats {
pub fn formatted_rate(&self) -> String {
format!("{:.1}%", self.open_rate)
}
}
pub async fn admin_dashboard(
State(AppState {
connection_pool, ..
}): State<AppState>,
Extension(user): Extension<AuthenticatedUser>,
) -> Result<Response, AppError> {
let stats = get_stats(&connection_pool).await?;
Extension(AuthenticatedUser { username, .. }): Extension<AuthenticatedUser>,
) -> Response {
let idempotency_key_1 = Uuid::new_v4().to_string();
let idempotency_key_2 = Uuid::new_v4().to_string();
let current_page = 1;
let subscribers = get_subs(&connection_pool, current_page)
.await
.context("Could not fetch subscribers from database.")
.map_err(AppError::unexpected_message)?;
let subs_count = get_total_subs(&connection_pool)
.await
.context("Could not fetch total subscribers count from the database.")?;
let max_page = get_max_page(subs_count, SUBS_PER_PAGE);
let users = get_users(&connection_pool)
.await
.context("Could not fetch users")?;
let posts = get_posts_page(&connection_pool, 1)
.await
.context("Could not fetch posts.")?;
let posts_current_page = 1;
let posts_count = get_posts_count(&connection_pool)
.await
.context("Could not fetch posts count.")?;
let posts_max_page = get_max_page(posts_count, POSTS_PER_PAGE);
let comments_current_page = 1;
let comments = get_comments_page(&connection_pool, comments_current_page)
.await
.context("Could not fetch comments.")?;
let comments_count = get_comments_count(&connection_pool)
.await
.context("Could not fetch comments count.")?;
let comments_max_page = get_max_page(comments_count, COMMENTS_PER_PAGE);
let template = DashboardTemplate {
user,
username,
idempotency_key_1,
idempotency_key_2,
stats,
subscribers,
current_page,
max_page,
count: subs_count,
users,
posts,
posts_current_page,
posts_max_page,
posts_count,
comments,
comments_current_page,
comments_max_page,
comments_count,
};
Ok(Html(template.render().unwrap()).into_response())
}
#[tracing::instrument("Computing dashboard stats", skip_all)]
async fn get_stats(connection_pool: &PgPool) -> Result<DashboardStats, anyhow::Error> {
let subscribers =
sqlx::query_scalar!("SELECT count(*) FROM subscriptions WHERE status = 'confirmed'")
.fetch_one(connection_pool)
.await
.context("Failed to fetch subscribers count.")?
.unwrap_or(0);
let posts = sqlx::query_scalar!("SELECT count(*) FROM posts")
.fetch_one(connection_pool)
.await
.context("Failed to fetch posts count.")?
.unwrap_or(0);
let notifications_sent = sqlx::query_scalar!("SELECT count(*) FROM notifications_delivered")
.fetch_one(connection_pool)
.await
.context("Failed to fetch notifications sent count.")?
.unwrap_or(0);
let opened =
sqlx::query_scalar!("SELECT count(*) FROM notifications_delivered WHERE opened = TRUE")
.fetch_one(connection_pool)
.await
.context("Failed to fetch notifications sent count.")?
.unwrap_or(0);
let open_rate = if notifications_sent == 0 {
0.0
} else {
(opened as f64) / (notifications_sent as f64) * 100.0
};
Ok(DashboardStats {
subscribers,
posts,
notifications_sent,
open_rate,
})
Html(template.render().unwrap()).into_response()
}
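
A short worked example of the open-rate computation in get_stats above, as a sketch with invented counts, showing the division-by-zero guard and the formatting used by DashboardStats::formatted_rate:

fn open_rate(opened: i64, notifications_sent: i64) -> f64 {
    // Same branching as get_stats: avoid dividing by zero when nothing was sent.
    if notifications_sent == 0 {
        0.0
    } else {
        (opened as f64) / (notifications_sent as f64) * 100.0
    }
}

fn main() {
    assert_eq!(open_rate(0, 0), 0.0);
    // 3 opened out of 12 delivered gives 25.0, rendered as "25.0%".
    println!("{:.1}%", open_rate(3, 12));
}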

View File

@@ -1,18 +1,18 @@
use crate::{
authentication::AuthenticatedUser,
idempotency::{IdempotencyKey, save_response, try_processing},
routes::{AdminError, AppError},
startup::AppState,
templates::{EmailTemplate, MessageTemplate, StandaloneEmailTemplate},
templates::MessageTemplate,
};
use anyhow::Context;
use askama::Template;
use axum::{
Form,
Extension, Form,
extract::State,
response::{Html, IntoResponse, Response},
};
use sqlx::{Executor, Postgres, Transaction};
use std::fmt::Display;
use uuid::Uuid;
#[derive(serde::Deserialize)]
@@ -23,14 +23,14 @@ pub struct BodyData {
idempotency_key: String,
}
#[tracing::instrument(name = "Creating newsletter issue", skip_all, fields(issue_id = tracing::field::Empty))]
#[tracing::instrument(skip_all)]
pub async fn insert_newsletter_issue(
transaction: &mut Transaction<'static, Postgres>,
title: &str,
email_template: &dyn EmailTemplate,
text_content: &str,
html_content: &str,
) -> Result<Uuid, sqlx::Error> {
let newsletter_issue_id = Uuid::new_v4();
tracing::Span::current().record("issue_id", newsletter_issue_id.to_string());
let query = sqlx::query!(
r#"
INSERT INTO newsletter_issues (
@@ -40,60 +40,40 @@ pub async fn insert_newsletter_issue(
"#,
newsletter_issue_id,
title,
email_template.text(),
email_template.html(),
text_content,
html_content
);
transaction.execute(query).await?;
Ok(newsletter_issue_id)
}
#[derive(Debug)]
pub enum EmailType {
NewPost,
Newsletter,
}
impl Display for EmailType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
EmailType::NewPost => write!(f, "new_post"),
EmailType::Newsletter => write!(f, "newsletter"),
}
}
}
#[tracing::instrument(name = "Adding new task to queue", skip(transaction))]
#[tracing::instrument(skip_all)]
pub async fn enqueue_delivery_tasks(
transaction: &mut Transaction<'static, Postgres>,
newsletter_issue_id: Uuid,
kind: EmailType,
) -> Result<(), sqlx::Error> {
let query = sqlx::query!(
r#"
INSERT INTO issue_delivery_queue (
newsletter_issue_id,
subscriber_email,
unsubscribe_token,
kind
subscriber_email
)
SELECT $1, email, unsubscribe_token, $2
SELECT $1, email
FROM subscriptions
WHERE status = 'confirmed'
"#,
newsletter_issue_id,
kind.to_string()
);
transaction.execute(query).await?;
Ok(())
}
#[tracing::instrument(name = "Publishing a newsletter", skip_all, fields(title = %form.title))]
#[tracing::instrument(name = "Publishing a newsletter", skip(connection_pool, form))]
pub async fn publish_newsletter(
State(AppState {
connection_pool,
base_url,
..
connection_pool, ..
}): State<AppState>,
Extension(AuthenticatedUser { user_id, .. }): Extension<AuthenticatedUser>,
Form(form): Form<BodyData>,
) -> Result<Response, AppError> {
validate_form(&form).map_err(|e| AdminError::Publish(anyhow::anyhow!(e)))?;
@@ -103,42 +83,39 @@ pub async fn publish_newsletter(
.try_into()
.map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? {
let mut transaction = match try_processing(&connection_pool, &idempotency_key, user_id).await? {
crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response);
}
};
let email_template = StandaloneEmailTemplate {
base_url: &base_url,
text_content: &form.text,
html_content: &form.html,
};
let issue_id = insert_newsletter_issue(&mut transaction, &form.title, &email_template)
let issue_id = insert_newsletter_issue(&mut transaction, &form.title, &form.text, &form.html)
.await
.context("Failed to store newsletter issue details.")?;
enqueue_delivery_tasks(&mut transaction, issue_id, EmailType::Newsletter)
enqueue_delivery_tasks(&mut transaction, issue_id)
.await
.context("Failed to enqueue delivery tasks.")?;
let message = String::from("Your email has been queued for delivery.");
let template = MessageTemplate::success(message);
let message = format!(
r#"The newsletter issue "{}" has been published!"#,
form.title
);
let template = MessageTemplate::Success { message };
let response = Html(template.render().unwrap()).into_response();
let response = save_response(transaction, &idempotency_key, response)
let response = save_response(transaction, &idempotency_key, user_id, response)
.await
.map_err(AdminError::UnexpectedError)?;
Ok(response)
}
fn validate_form(form: &BodyData) -> Result<(), anyhow::Error> {
fn validate_form(form: &BodyData) -> Result<(), &'static str> {
if form.title.is_empty() {
anyhow::bail!("The title was empty.");
return Err("The title was empty.");
}
if form.html.is_empty() || form.text.is_empty() {
anyhow::bail!("The content was empty.");
return Err("The content was empty.");
}
Ok(())
}

View File

@@ -1,11 +1,9 @@
use crate::{
authentication::AuthenticatedUser,
idempotency::{IdempotencyKey, save_response, try_processing},
routes::{
AdminError, AppError, EmailType, Path, enqueue_delivery_tasks, insert_newsletter_issue,
},
routes::{AdminError, AppError, enqueue_delivery_tasks, insert_newsletter_issue},
startup::AppState,
templates::{MessageTemplate, NewPostEmailTemplate},
templates::MessageTemplate,
};
use anyhow::Context;
use askama::Template;
@@ -33,16 +31,10 @@ fn validate_form(form: &CreatePostForm) -> Result<(), anyhow::Error> {
}
}
#[tracing::instrument(
name = "Publishing new blog post",
skip(connection_pool, base_url, form),
fields(title = %form.title)
)]
#[tracing::instrument(name = "Creating a post", skip(connection_pool, form))]
pub async fn create_post(
State(AppState {
connection_pool,
base_url,
..
connection_pool, ..
}): State<AppState>,
Extension(AuthenticatedUser { user_id, .. }): Extension<AuthenticatedUser>,
Form(form): Form<CreatePostForm>,
@@ -54,7 +46,7 @@ pub async fn create_post(
.try_into()
.map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? {
let mut transaction = match try_processing(&connection_pool, &idempotency_key, user_id).await? {
crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response);
@@ -65,23 +57,28 @@ pub async fn create_post(
.await
.context("Failed to insert new post in the database.")?;
let newsletter_uuid = create_newsletter(&mut transaction, &base_url, &form.title, &post_id)
let newsletter_uuid = create_newsletter(&mut transaction, &form.title, &form.content, &post_id)
.await
.context("Failed to create newsletter.")?;
enqueue_delivery_tasks(&mut transaction, newsletter_uuid, EmailType::NewPost)
enqueue_delivery_tasks(&mut transaction, newsletter_uuid)
.await
.context("Failed to enqueue delivery tasks.")?;
let template = MessageTemplate::success("Your new post has been published!".into());
let template = MessageTemplate::Success {
message: "Your new post has been saved. Subscribers will be notified.".into(),
};
let response = Html(template.render().unwrap()).into_response();
let response = save_response(transaction, &idempotency_key, response)
let response = save_response(transaction, &idempotency_key, user_id, response)
.await
.map_err(AdminError::UnexpectedError)?;
Ok(response)
}
#[tracing::instrument(name = "Saving new blog post in the database", skip_all)]
#[tracing::instrument(
name = "Saving new post in the database",
skip(transaction, title, content, author)
)]
pub async fn insert_post(
transaction: &mut Transaction<'static, Postgres>,
title: &str,
@@ -104,39 +101,16 @@ pub async fn insert_post(
Ok(post_id)
}
#[tracing::instrument(name = "Creating newsletter for new post", skip_all)]
#[tracing::instrument(
name = "Creating newsletter for new post",
skip(transaction, title, content, _post_id)
)]
pub async fn create_newsletter(
transaction: &mut Transaction<'static, Postgres>,
base_url: &str,
post_title: &str,
post_id: &Uuid,
title: &str,
content: &str,
_post_id: &Uuid,
) -> Result<Uuid, sqlx::Error> {
let template = NewPostEmailTemplate {
base_url,
post_title,
post_id,
post_excerpt: "",
};
insert_newsletter_issue(transaction, post_title, &template).await
}
pub async fn delete_post(
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(post_id): Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!("DELETE FROM posts WHERE post_id = $1", post_id)
.execute(&connection_pool)
.await
.context("Failed to delete post from database.")
.map_err(AppError::unexpected_message)?;
if res.rows_affected() > 1 {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the post in the database."
)))
} else {
let template = MessageTemplate::success("The post has been deleted.".into());
Ok(template.render().unwrap().into_response())
}
// We need to send a special link with a unique ID to determine if the user clicked it or not.
insert_newsletter_issue(transaction, title, content, content).await
}
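
Regarding the comment above about a special link with a unique ID: a minimal sketch of how a per-recipient identifier could be spliced into the issue content so clicks can be attributed without a tracking pixel. The EMAIL_ID placeholder follows the substitution used by the removed inject_tracking_info in the delivery worker earlier in this diff; the helper name and URLs here are hypothetical.

use uuid::Uuid;

// Hypothetical helper: swap an EMAIL_ID placeholder baked into the issue
// content for a fresh per-recipient id. The click handler can later look the
// id up (for instance in notifications_delivered) and mark the email as opened.
fn inject_tracking_id(text: &str, html: &str) -> (Uuid, String, String) {
    let email_id = Uuid::new_v4();
    let id = email_id.to_string();
    (
        email_id,
        text.replace("EMAIL_ID", &id),
        html.replace("EMAIL_ID", &id),
    )
}

fn main() {
    let (id, _text, html) = inject_tracking_id(
        "Read it here: https://example.com/posts/123?email_id=EMAIL_ID",
        "<a href=\"https://example.com/posts/123?email_id=EMAIL_ID\">Read it here</a>",
    );
    assert!(html.contains(&id.to_string()));
}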

View File

@@ -1,116 +0,0 @@
use crate::{
domain::SubscriberEntry,
routes::{AppError, Path, Query},
startup::AppState,
templates::{MessageTemplate, SubListTemplate},
};
use anyhow::Context;
use askama::Template;
use axum::{
extract::State,
response::{Html, IntoResponse, Response},
};
use sqlx::PgPool;
use uuid::Uuid;
pub const SUBS_PER_PAGE: i64 = 5;
#[tracing::instrument(name = "Retrieving subscribers from database", skip(connection_pool))]
pub async fn get_subscribers_page(
State(AppState {
connection_pool, ..
}): State<AppState>,
Query(SubsQueryParams { page }): Query<SubsQueryParams>,
) -> Result<Response, AppError> {
let count = get_total_subs(&connection_pool)
.await
.context("Could not fetch total subscribers count from the database.")
.map_err(AppError::unexpected_message)?;
let max_page = get_max_page(count, SUBS_PER_PAGE);
let subscribers = get_subs(&connection_pool, page)
.await
.context("Could not fetch subscribers data.")
.map_err(AppError::unexpected_message)?;
let template = SubListTemplate {
subscribers,
current_page: page,
max_page,
};
Ok(Html(template.render().unwrap()).into_response())
}
#[tracing::instrument(
name = "Deleting subscriber from database",
skip(connection_pool),
fields(email=tracing::field::Empty)
)]
pub async fn delete_subscriber(
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(subscriber_id): Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!(
"DELETE FROM subscriptions WHERE id = $1 RETURNING email",
subscriber_id
)
.fetch_optional(&connection_pool)
.await
.context("Failed to delete subscriber from database.")
.map_err(AppError::unexpected_message)?;
if let Some(record) = res {
tracing::Span::current().record("email", tracing::field::display(&record.email));
let template = MessageTemplate::success(format!(
"The subscriber with email '{}' has been deleted.",
record.email
));
Ok(template.render().unwrap().into_response())
} else {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the subscriber in the database."
)))
}
}
#[tracing::instrument(
name = "Retrieving next subscribers in database",
skip(connection_pool),
fields(offset = tracing::field::Empty)
)]
pub async fn get_subs(
connection_pool: &PgPool,
page: i64,
) -> Result<Vec<SubscriberEntry>, sqlx::Error> {
let offset = (page - 1) * SUBS_PER_PAGE;
tracing::Span::current().record("offset", tracing::field::display(&offset));
let subscribers = sqlx::query_as!(
SubscriberEntry,
"SELECT * FROM subscriptions ORDER BY subscribed_at DESC LIMIT $1 OFFSET $2",
SUBS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
Ok(subscribers)
}
pub async fn get_total_subs(connection_pool: &PgPool) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM subscriptions")
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}
pub fn get_max_page(count: i64, num_per_page: i64) -> i64 {
let mut max_page = count.div_euclid(num_per_page);
if count % num_per_page > 0 {
max_page += 1;
}
max_page
}
#[derive(serde::Deserialize)]
pub struct SubsQueryParams {
page: i64,
}
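
A quick worked example, with invented counts, of the get_max_page ceiling-division helper defined above (the same helper drives the posts and comments pagination elsewhere in this diff):

fn get_max_page(count: i64, num_per_page: i64) -> i64 {
    // Ceiling division: a partially filled page still counts as a page.
    let mut max_page = count.div_euclid(num_per_page);
    if count % num_per_page > 0 {
        max_page += 1;
    }
    max_page
}

fn main() {
    assert_eq!(get_max_page(10, 5), 2); // exactly two full pages
    assert_eq!(get_max_page(12, 5), 3); // 5 + 5 + 2, so three pages
    assert_eq!(get_max_page(0, 5), 0);  // nothing to show, no pages
}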

View File

@@ -1,264 +0,0 @@
use crate::idempotency::{IdempotencyKey, save_response, try_processing};
use crate::routes::{AdminError, get_max_page};
use crate::templates::CommentsPageDashboardTemplate;
use crate::{
domain::CommentEntry,
routes::AppError,
startup::AppState,
templates::{CommentsList, HtmlTemplate, MessageTemplate},
};
use anyhow::Context;
use askama::Template;
use axum::{
Form,
extract::{Path, Query, State},
response::{IntoResponse, Response},
};
use sqlx::{Executor, PgPool, Postgres, Transaction};
use uuid::Uuid;
#[derive(serde::Deserialize)]
pub struct CommentPathParam {
post_id: Uuid,
}
#[derive(serde::Deserialize)]
pub struct CommentForm {
pub author: Option<String>,
pub content: String,
pub idempotency_key: String,
pub user_id: Option<Uuid>,
}
#[tracing::instrument(name = "Posting new comment", skip_all, fields(post_id = %post_id))]
pub async fn post_comment(
Path(CommentPathParam { post_id }): Path<CommentPathParam>,
State(AppState {
connection_pool, ..
}): State<AppState>,
Form(form): Form<CommentForm>,
) -> Result<Response, AppError> {
validate_form(&form)?;
let idempotency_key: IdempotencyKey = form
.idempotency_key
.try_into()
.map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? {
crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response);
}
};
insert_comment(
&mut transaction,
post_id,
form.author,
form.user_id,
form.content,
)
.await
.context("Could not insert comment into database.")?;
let template = HtmlTemplate(MessageTemplate::success(
"Your comment has been posted.".into(),
));
let response = template.into_response();
let response = save_response(transaction, &idempotency_key, response).await?;
Ok(response)
}
fn validate_form(form: &CommentForm) -> Result<(), anyhow::Error> {
if form.content.is_empty() {
anyhow::bail!("Comment content cannot be empty.");
}
Ok(())
}
#[tracing::instrument(name = "Inserting new comment in database", skip_all, fields(comment_id = tracing::field::Empty))]
async fn insert_comment(
transaction: &mut Transaction<'static, Postgres>,
post_id: Uuid,
author: Option<String>,
user_id: Option<Uuid>,
content: String,
) -> Result<Uuid, sqlx::Error> {
let author = if user_id.is_some() {
None
} else {
author
.filter(|s| !s.trim().is_empty())
.map(|s| s.trim().to_string())
};
let content = content.trim();
let comment_id = Uuid::new_v4();
tracing::Span::current().record("comment_id", comment_id.to_string());
let query = sqlx::query!(
"
INSERT INTO comments (user_id, comment_id, post_id, author, content)
VALUES ($1, $2, $3, $4, $5)
",
user_id,
comment_id,
post_id,
author,
content,
);
transaction.execute(query).await?;
Ok(comment_id)
}
pub const COMMENTS_PER_PAGE: i64 = 5;
#[derive(serde::Deserialize)]
pub struct GetCommentsQueryParams {
page: i64,
}
#[tracing::instrument(name = "Fetching comments", skip(connection_pool))]
pub async fn get_comments(
Path(CommentPathParam { post_id }): Path<CommentPathParam>,
Query(GetCommentsQueryParams { page }): Query<GetCommentsQueryParams>,
State(AppState {
connection_pool, ..
}): State<AppState>,
) -> Result<Response, AppError> {
let comments = get_comments_page_for_post(&connection_pool, post_id, page)
.await
.context("Could not fetch comments.")?;
let count = get_comments_count_for_post(&connection_pool, post_id)
.await
.context("Could not fetch comments count")?;
let max_page = get_max_page(count, COMMENTS_PER_PAGE);
let template = HtmlTemplate(CommentsList {
comments,
current_page: page,
max_page,
});
Ok(template.into_response())
}
#[tracing::instrument(name = "Fetching all comments", skip(connection_pool))]
pub async fn get_all_comments(
Query(GetCommentsQueryParams { page }): Query<GetCommentsQueryParams>,
State(AppState {
connection_pool, ..
}): State<AppState>,
) -> Result<Response, AppError> {
let comments = get_comments_page(&connection_pool, page)
.await
.context("Could not fetch comments.")?;
let count = get_comments_count(&connection_pool)
.await
.context("Could not fetch comments count")?;
let comments_max_page = get_max_page(count, COMMENTS_PER_PAGE);
let template = HtmlTemplate(CommentsPageDashboardTemplate {
comments,
comments_current_page: page,
comments_max_page,
});
Ok(template.into_response())
}
pub async fn delete_comment(
State(AppState {
connection_pool, ..
}): State<AppState>,
crate::routes::Path(comment_id): crate::routes::Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!("DELETE FROM comments WHERE comment_id = $1", comment_id)
.execute(&connection_pool)
.await
.context("Failed to delete comment from database.")
.map_err(AppError::unexpected_message)?;
if res.rows_affected() > 1 {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the comment in the database."
)))
} else {
let template = MessageTemplate::success("The comment has been deleted.".into());
Ok(template.render().unwrap().into_response())
}
}
pub async fn get_comments_page_for_post(
connection_pool: &PgPool,
post_id: Uuid,
page: i64,
) -> Result<Vec<CommentEntry>, sqlx::Error> {
let offset = (page - 1) * COMMENTS_PER_PAGE;
let mut comments = sqlx::query_as!(
CommentEntry,
r#"
SELECT c.user_id as "user_id?", u.username as "username?", c.comment_id, c.post_id, c.author, c.content, c.published_at
FROM comments c
LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL
WHERE c.post_id = $1
ORDER BY c.published_at DESC
LIMIT $2
OFFSET $3
"#,
post_id,
COMMENTS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
for comment in comments.iter_mut() {
if let Some(user_id) = comment.user_id {
let record = sqlx::query!(
"SELECT username, full_name FROM users WHERE user_id = $1",
user_id
)
.fetch_one(connection_pool)
.await?;
let author = record.full_name.unwrap_or(record.username);
comment.author = Some(author);
}
}
Ok(comments)
}
pub async fn get_comments_count_for_post(
connection_pool: &PgPool,
post_id: Uuid,
) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM comments WHERE post_id = $1", post_id)
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}
pub async fn get_comments_page(
connection_pool: &PgPool,
page: i64,
) -> Result<Vec<CommentEntry>, sqlx::Error> {
let offset = (page - 1) * COMMENTS_PER_PAGE;
let comments = sqlx::query_as!(
CommentEntry,
r#"
SELECT c.user_id as "user_id?", u.username as "username?", c.comment_id, c.post_id, c.author, c.content, c.published_at
FROM comments c
LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL
ORDER BY published_at DESC
LIMIT $1
OFFSET $2
"#,
COMMENTS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
Ok(comments)
}
pub async fn get_comments_count(connection_pool: &PgPool) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM comments")
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}

View File

@@ -1,5 +1,5 @@
use axum::http::StatusCode;
use axum::{http::StatusCode, response::IntoResponse};
pub async fn health_check() -> StatusCode {
pub async fn health_check() -> impl IntoResponse {
StatusCode::OK
}

View File

@@ -1,7 +1,8 @@
use crate::templates::{HomeTemplate, HtmlTemplate};
use axum::response::{IntoResponse, Response};
use askama::Template;
use axum::response::Html;
pub async fn home() -> Response {
let template = HtmlTemplate(HomeTemplate);
template.into_response()
use crate::templates::HomeTemplate;
pub async fn home() -> Html<String> {
Html(HomeTemplate.render().unwrap())
}

View File

@@ -15,8 +15,6 @@ use axum::{
};
use axum::{http::StatusCode, response::Redirect};
use secrecy::SecretString;
use sqlx::PgPool;
use uuid::Uuid;
#[derive(serde::Deserialize)]
pub struct LoginFormData {
@@ -31,13 +29,12 @@ pub async fn get_login(session: TypedSession) -> Result<Response, AppError> {
.context("Failed to retrieve user id from data store.")?
.is_some()
{
Ok(Redirect::to("dashboard").into_response())
Ok(Redirect::to("/admin/dashboard").into_response())
} else {
Ok(Html(LoginTemplate.render().unwrap()).into_response())
}
}
#[tracing::instrument(name = "Authenticating user", skip_all, fields(name = %form.username))]
pub async fn post_login(
session: TypedSession,
State(AppState {
@@ -50,11 +47,8 @@ pub async fn post_login(
password: form.password,
};
tracing::Span::current().record("username", tracing::field::display(&credentials.username));
let (user_id, role) = validate_credentials(credentials, &connection_pool).await?;
let user_id = validate_credentials(credentials, &connection_pool).await?;
tracing::Span::current().record("user_id", tracing::field::display(&user_id));
record_login(&connection_pool, &user_id)
.await
.context("Failed to register new login event.")?;
session.renew().await.context("Failed to renew session.")?;
session
@@ -65,20 +59,8 @@ pub async fn post_login(
.insert_username(form.username)
.await
.context("Failed to insert username in session data store.")?;
session
.insert_role(role)
.await
.context("Failed to insert role in session data store.")?;
let mut headers = HeaderMap::new();
headers.insert("HX-Redirect", "/dashboard".parse().unwrap());
headers.insert("HX-Redirect", "/admin/dashboard".parse().unwrap());
Ok((StatusCode::OK, headers).into_response())
}
#[tracing::instrument(name = "Recording new login event", skip_all, fields(user_id = %user_id))]
async fn record_login(connection_pool: &PgPool, user_id: &Uuid) -> Result<(), sqlx::Error> {
sqlx::query!("INSERT INTO user_logins (user_id) VALUES ($1)", user_id)
.execute(connection_pool)
.await?;
Ok(())
}

Some files were not shown because too many files have changed in this diff.