Compare commits

..

69 Commits

Author SHA1 Message Date
Alphonse Paix
998c156d3c update main.css
2025-09-22 15:44:59 +02:00
Alphonse Paix
f1ce77a762 Unsubscribe option available on website 2025-09-22 15:44:02 +02:00
Alphonse Paix
a37123a32d Tests update
2025-09-22 12:40:17 +02:00
Alphonse Paix
1cc4871dd2 Templates refactoring
Inject HTML directly and fix the footer falling outside the main content <div>
2025-09-22 01:43:36 +02:00
Alphonse Paix
72fa283a6d Unsubscribe link in emails sent
2025-09-22 01:25:36 +02:00
Alphonse Paix
7af07ea0dd Basic unsubscribe endpoint 2025-09-21 17:49:31 +02:00
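(For illustration: a minimal sketch of what a basic unsubscribe endpoint could look like with axum and sqlx. The route shape, state type, and exact query are assumptions; only the `unsubscribe_token` column is visible in the cached queries further down this compare.)

```rust
use axum::{
    extract::{Path, State},
    response::Html,
};
use sqlx::PgPool;

/// Hypothetical handler: look the subscriber up by the token carried in the
/// email link and delete the row.
async fn unsubscribe(
    State(pool): State<PgPool>,
    Path(token): Path<String>,
) -> Html<&'static str> {
    let result = sqlx::query("DELETE FROM subscriptions WHERE unsubscribe_token = $1")
        .bind(&token)
        .execute(&pool)
        .await;
    match result {
        Ok(r) if r.rows_affected() > 0 => Html("<p>You have been unsubscribed.</p>"),
        _ => Html("<p>Unknown or already removed subscription.</p>"),
    }
}
```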
Alphonse Paix
6a963a8c0d HTML and plain text for new post mail notifications
2025-09-21 03:45:29 +02:00
Alphonse Paix
abca3a23b7 Dashboard template revision
2025-09-21 02:01:42 +02:00
Alphonse Paix
6b3cea9fb2 table wrapping
2025-09-20 19:19:57 +02:00
Alphonse Paix
79979e6a58 responsive tables and post page redesign for narrow viewport
2025-09-20 17:39:25 +02:00
Alphonse Paix
771130e9f0 remove unused use statement
2025-09-20 16:46:21 +02:00
Alphonse Paix
c618de9732 markdown with GFM extension
2025-09-20 16:45:16 +02:00
Alphonse Paix
c11e552d0a fix mobile menu margins
2025-09-20 16:25:47 +02:00
Alphonse Paix
6308ac279e <pre> element overflow 2025-09-20 16:05:20 +02:00
Alphonse Paix
ddb837be56 Markdown rendering 2025-09-20 16:01:51 +02:00
Alphonse Paix
8f0b59775e Basic markdown with Tailwind CSS Typography 2025-09-20 14:03:13 +02:00
Alphonse Paix
6d2d486866 Responsive navbar
2025-09-20 05:05:49 +02:00
Alphonse Paix
40dfe1aed8 Templates refactoring
2025-09-20 04:43:55 +02:00
Alphonse Paix
b52b676dc0 Error handling refactor and 500 page/message templates 2025-09-20 04:06:48 +02:00
Alphonse Paix
f5cd91108a Refactor admin routes to use new AppError struct in responses 2025-09-20 01:08:05 +02:00
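(The commit above names an `AppError` struct used in responses. A minimal sketch of the usual axum pattern for such a type follows; the wrapper shape and the rendered body are assumptions, only the axum and anyhow dependencies are taken from the repository.)

```rust
use axum::{
    http::StatusCode,
    response::{IntoResponse, Response},
};

/// Hypothetical shape: a thin wrapper around anyhow::Error.
pub struct AppError(anyhow::Error);

/// Let `?` convert any error into an AppError inside handlers.
impl<E: Into<anyhow::Error>> From<E> for AppError {
    fn from(err: E) -> Self {
        Self(err.into())
    }
}

impl IntoResponse for AppError {
    fn into_response(self) -> Response {
        // A real implementation would render the 500 page/message template
        // from the neighbouring commits instead of plain text.
        (
            StatusCode::INTERNAL_SERVER_ERROR,
            format!("Something went wrong: {}", self.0),
        )
            .into_response()
    }
}
```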
Alphonse Paix
01d2add44b Askama message template 2025-09-20 00:51:46 +02:00
Alphonse Paix
46a6905b12 Logo in the navbar
2025-09-20 00:20:58 +02:00
Alphonse Paix
91e8b5f001 Templates adjustments
2025-09-19 20:07:38 +02:00
Alphonse Paix
a75c410948 404 page 2025-09-19 19:34:06 +02:00
Alphonse Paix
95c4d3fdd0 Posts dedicated page with cards linking to specific post 2025-09-19 01:04:10 +02:00
Alphonse Paix
71d4872878 Refactor test suite to handle 303 See Other 2025-09-18 21:00:01 +02:00
Alphonse Paix
3120c700a4 query metadata 2025-09-18 20:09:03 +02:00
Alphonse Paix
08d5f611b5 Brought back newsletter form on admin page 2025-09-18 18:40:03 +02:00
Alphonse Paix
54218f92a9 Admin can now write posts
Posts can be displayed on the website, and subscribers are automatically
notified by email. This makes it possible to track explicitly how many
people followed the link provided in the emails, without being intrusive
(no invisible tracking image).
2025-09-18 17:22:33 +02:00
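(As context for the tracking approach described above, a plain link instead of an invisible image: a hedged sketch of a tracked-link handler. The route, parameters, and state type are assumptions; the UPDATE mirrors the `notifications_delivered` query visible in the cached .sqlx files later in this compare.)

```rust
use axum::{
    extract::{Path, State},
    response::Redirect,
};
use sqlx::PgPool;
use uuid::Uuid;

/// Hypothetical handler for the link embedded in notification emails:
/// record that this email's link was followed, then redirect to the post.
async fn track_and_redirect(
    State(pool): State<PgPool>,
    Path((email_id, post_id)): Path<(Uuid, Uuid)>,
) -> Redirect {
    // Best-effort: a failed update should not block the reader.
    let _ = sqlx::query("UPDATE notifications_delivered SET opened = TRUE WHERE email_id = $1")
        .bind(email_id)
        .execute(&pool)
        .await;
    Redirect::to(&format!("/posts/{post_id}"))
}
```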
Alphonse Paix
044991d623 Fix redirect issues
The dashboard button now correctly redirects to the login page when the
user is not logged in, and the login page redirects to the dashboard when
they already are.
2025-09-17 23:57:45 +02:00
Alphonse Paix
72d0306e35 Update README
2025-09-17 14:35:39 +02:00
Alphonse Paix
e191d35664 Formatting 2025-09-17 14:24:45 +02:00
Alphonse Paix
b5f0f448d7 Test suite refactoring to match new htmx HTML swapping in pages 2025-09-17 14:16:27 +02:00
Alphonse Paix
859247d900 HX-Redirect to handle redirections with htmx 2025-09-17 13:16:56 +02:00
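(HX-Redirect is a standard htmx response header: when a response carries it, htmx performs a full browser navigation to the given URL instead of swapping a fragment. A minimal axum sketch, with the handler name and target URL made up:)

```rust
use axum::response::IntoResponse;

/// After a successful htmx form submission, tell htmx to navigate the whole
/// browser window rather than swap a fragment into the current page.
async fn login_succeeded() -> impl IntoResponse {
    ([("HX-Redirect", "/admin/dashboard")], ())
}
```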
Alphonse Paix
2d336ed000 Use HTML swap to display success and error messages 2025-09-17 03:40:23 +02:00
Alphonse Paix
88dad022ce Basic dashboard for newsletter issue and password systems 2025-09-17 01:47:03 +02:00
Alphonse Paix
1d027b5460 htmx and Tailwind CSS production setup 2025-09-16 20:30:34 +02:00
Alphonse Paix
38208654dc Run on port 8080 for local env + minor fix for subscription confirm page 2025-09-16 19:09:11 +02:00
Alphonse Paix
0a2c4a32c1 Update README
2025-09-16 16:53:45 +02:00
Alphonse Paix
b736e2fe8d Confirmation page and minor improvements to homepage and form messages
Basic redirects with flash messages for success and error feedback
2025-09-16 16:47:28 +02:00
Alphonse Paix
f948728348 Merge remote-tracking branch 'origin/main' into askama 2025-09-16 15:25:28 +02:00
Alphonse Paix
5cdc3ea29d Remove name from subscriptions table 2025-09-16 15:24:08 +02:00
Alphonse Paix
56035fab30 Askama + htmx for frontend
Server-side rendering with htmx and Tailwind CSS for the styling
2025-09-16 01:47:18 +02:00
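(For readers unfamiliar with Askama: a template is a plain struct whose fields feed a compile-time-checked template. A self-contained sketch with an inline template source; the real project uses template files styled with Tailwind CSS, and the struct and markup here are invented.)

```rust
use askama::Template;

/// Hypothetical fragment; the project's actual templates live in files.
#[derive(Template)]
#[template(source = "<p class=\"message\">{{ message }}</p>", ext = "html")]
struct MessageFragment<'a> {
    message: &'a str,
}

fn main() {
    let html = MessageFragment { message: "Subscription confirmed" }
        .render()
        .expect("template rendering failed");
    println!("{html}");
}
```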
Alphonse Paix
a9c6cb36a5 Update config to use self-hosted email server 2025-09-14 19:29:02 +00:00
Alphonse Paix
ba6b2dbd93 Better datastores security
Ports are now exposed on localhost only, and the local Postgres instance
uses a stronger password.
2025-09-07 12:46:27 +02:00
Alphonse Paix
1ec51f0caf Update workflow to include redis 2025-09-05 19:23:11 +02:00
Alphonse Paix
54b0512f3f Use env vars for TLS files 2025-09-05 18:27:46 +02:00
Alphonse Paix
8d6cab41d0 Support for TLS encryption 2025-09-05 18:13:35 +02:00
Alphonse Paix
a4104ca1b2 Register form and confirmation messages 2025-09-04 23:39:53 +02:00
Alphonse Paix
f8dee295cd Fault-tolerant delivery system 2025-09-04 02:54:49 +02:00
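(The `issue_delivery_queue` table and its `FOR UPDATE SKIP LOCKED` query, visible in the diff below, suggest the usual claim-one-task-per-transaction pattern: a worker that crashes simply releases its row, and another worker can retry. A hedged sketch of the claim step; the function shape and column subset are assumptions, while the SQL mirrors the cached query in this compare.)

```rust
use sqlx::{PgPool, Postgres, Transaction};
use uuid::Uuid;

/// Claim at most one delivery task, keeping it locked for the lifetime of
/// the returned transaction. If the worker dies, the lock is released and
/// the task becomes available to another worker.
async fn claim_task(
    pool: &PgPool,
) -> Result<Option<(Transaction<'static, Postgres>, Uuid, String)>, sqlx::Error> {
    let mut tx = pool.begin().await?;
    let row: Option<(Uuid, String)> = sqlx::query_as(
        "SELECT newsletter_issue_id, subscriber_email
         FROM issue_delivery_queue
         FOR UPDATE
         SKIP LOCKED
         LIMIT 1",
    )
    .fetch_optional(&mut *tx)
    .await?;
    Ok(row.map(|(issue_id, email)| (tx, issue_id, email)))
}
```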
Alphonse Paix
9a184b93ac Authentication and form for newsletter publishing 2025-09-01 15:47:27 +02:00
Alphonse Paix
d96a401d99 Admin dashboard and sessions 2025-09-01 03:08:43 +02:00
Alphonse Paix
3dce578ba0 Flash messages using axum-messages 2025-08-30 01:39:12 +02:00
Alphonse Paix
8447d050d6 Handler to send emails to confirmed subscribers 2025-08-27 12:14:11 +02:00
Alphonse Paix
9193f2020d Error handling with thiserror and anyhow 2025-08-26 12:47:22 +02:00
Alphonse Paix
4ce25a8136 Fix send email request body 2025-08-25 18:51:37 +02:00
Alphonse Paix
dfd3300371 Query metadata 2025-08-25 17:52:44 +02:00
Alphonse Paix
d1cf1f6c4f Confirm subscription endpoint 2025-08-25 17:46:03 +02:00
Alphonse Paix
73ff7c04fe Query metadata, migrations and formatting 2025-08-24 12:45:32 +02:00
Alphonse Paix
954772e9db Set status to 'confirmed' for new subscribers 2025-08-24 11:43:08 +02:00
Alphonse Paix
4389873bf4 Email client, application startup logic and tests 2025-08-24 11:31:03 +02:00
Alphonse Paix
85ab04f254 Parse data from incoming request 2025-08-23 11:13:57 +02:00
Alphonse Paix
4d049a744a Fix bug when reading environment variables 2025-08-22 16:29:11 +02:00
Alphonse Paix
a7473bb7f5 Environment variables at runtime to connect to database 2025-08-22 16:01:20 +02:00
Alphonse Paix
1567f94b1f Docker for deployment 2025-08-22 14:25:34 +02:00
Alphonse Paix
59817083eb Update GitHub workflow 2025-08-22 08:44:02 +02:00
Alphonse Paix
b280f10c40 Fix incorrect database query in test suite 2025-08-22 08:28:22 +02:00
Alphonse Paix
5cc5758097 Telemetry 2025-08-22 08:14:59 +02:00
Alphonse Paix
ded2a611e2 Database connection and user registration 2025-08-21 15:38:12 +02:00
157 changed files with 1547 additions and 6461 deletions


@@ -1,3 +1,3 @@
 [target.x86_64-unknown-linux-gnu]
 linker = "clang"
-rustflags = ["-C", "link-arg=-fuse-ld=mold"]
+rustflags = ["-C", "link-arg=-fuse-ld=/usr/bin/mold"]


@@ -4,9 +4,3 @@
 Dockerfile
 /scripts
 /migrations
-/node_modules
-/assets/css/main.css
-/.github
-README.md
-/tests
-/configuration/local.yaml

.env Normal file

@@ -0,0 +1 @@
DATABASE_URL="postgres://postgres:Jq09NF6Y8ZXJS4jd9c8U@localhost:5432/newsletter"


@@ -1,6 +1,11 @@
+# The name of your workflow. GitHub displays the names of your workflows on your repository's "Actions" tab
 name: Rust
+# To automatically trigger the workflow
 on:
+  # NB: this differs from the book's project!
+  # These settings allow us to run this specific CI pipeline for PRs against
+  # this specific branch (a.k.a. book chapter).
   push:
     branches:
       - main
@@ -13,51 +18,74 @@ env:
   CARGO_TERM_COLOR: always
   SQLX_VERSION: 0.8.6
   SQLX_FEATURES: "rustls,postgres"
-  DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
-  APP_DATABASE__HOST: postgres
-  APP_KV_STORE__HOST: redis
+  APP_USER: app
+  APP_USER_PWD: secret
+  APP_DB_NAME: newsletter
+# A workflow run is made up of one or more jobs, which run in parallel by default
+# Each job runs in a runner environment specified by runs-on
 jobs:
+  # Unique identifier of our job (`job_id`)
   test:
+    # Sets the name `Test` for the job, which is displayed in the GitHub UI
     name: Test
+    # Containers must run in Linux based operating systems
     runs-on: ubuntu-latest
+    # Service containers to run alongside the `test` container job
     services:
+      # Label used to access the service container
      postgres:
+        # Docker Hub image
        image: postgres
+        # Environment variables scoped only for the `postgres` element
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
-          POSTGRES_DB: newsletter
+          POSTGRES_DB: postgres
+        # When you map ports using the ports keyword, GitHub uses the --publish command to publish the containers ports to the Docker host
+        # Opens tcp port 5432 on the host and service container
        ports:
-          - 15432:5432
+          - 5432:5432
      redis:
-        image: redis
+        image: redis:7
        ports:
-          - 16379:6379
+          - 6379:6379
    steps:
+      # Downloads a copy of the code in your repository before running CI tests
      - name: Check out repository code
+        # The uses keyword specifies that this step will run v4 of the actions/checkout action.
+        # This is an action that checks out your repository onto the runner, allowing you to run scripts or other actions against your code (such as build and test tools).
+        # You should use the checkout action any time your workflow will run against the repository's code.
        uses: actions/checkout@v4
-      - name: Install mold linker
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y mold clang
+      # This GitHub Action installs a Rust toolchain using rustup. It is designed for one-line concise usage and good defaults.
+      # It also takes care of caching intermediate build artifacts.
      - name: Install the Rust toolchain
        uses: actions-rust-lang/setup-rust-toolchain@v1
-        with:
-          cache: false
      - name: Install sqlx-cli
        run: cargo install sqlx-cli
          --version=${{ env.SQLX_VERSION }}
          --features ${{ env.SQLX_FEATURES }}
          --no-default-features
          --locked
+      - name: Create app user in Postgres
+        run: |
+          sudo apt-get install postgresql-client
+          # Create the application user
+          CREATE_QUERY="CREATE USER ${APP_USER} WITH PASSWORD '${APP_USER_PWD}';"
+          PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${CREATE_QUERY}"
+          # Grant create db privileges to the app user
+          GRANT_QUERY="ALTER USER ${APP_USER} CREATEDB;"
+          PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${GRANT_QUERY}"
      - name: Migrate database
-        run: cargo sqlx migrate run
+        run: SKIP_DOCKER=true ./scripts/init_db.sh
      - name: Run tests
-        run: TEST_LOG=true cargo test
+        run: cargo test
      - name: Check that queries are fresh
        run: cargo sqlx prepare --check --workspace
+  # `fmt` container job
  fmt:
    name: Rustfmt
    runs-on: ubuntu-latest
@@ -67,30 +95,31 @@ jobs:
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          components: rustfmt
-          cache: false
      - name: Enforce formatting
        run: cargo fmt --check
+  # `clippy` container job
  clippy:
    name: Clippy
    runs-on: ubuntu-latest
    env:
+      # This environment variable forces sqlx to use its offline mode,
+      # which means that it will not attempt to connect to a database
+      # when running the tests. It'll instead use the cached query results.
+      # We check that the cached query results are up-to-date in another job,
+      # to speed up the overall CI pipeline.
+      # This will all be covered in detail in chapter 5.
      SQLX_OFFLINE: true
    steps:
-      - name: Check out repository code
-        uses: actions/checkout@v4
-      - name: Install mold linker
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y mold clang
+      - uses: actions/checkout@v4
      - name: Install the Rust toolchain
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          components: clippy
-          cache: false
      - name: Linting
        run: cargo clippy -- -D warnings
+  # `coverage` container job
  coverage:
    name: Code coverage
    runs-on: ubuntu-latest
@@ -100,35 +129,45 @@
        env:
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: password
-          POSTGRES_DB: newsletter
+          POSTGRES_DB: postgres
        ports:
-          - 15432:5432
+          - 5432:5432
      redis:
-        image: redis
+        image: redis:7
        ports:
-          - 16379:6379
+          - 6379:6379
    steps:
      - uses: actions/checkout@v4
-      - name: Install mold linker
-        run: |
-          sudo apt-get update
-          sudo apt-get install -y mold clang
      - name: Install the Rust toolchain
        uses: actions-rust-lang/setup-rust-toolchain@v1
        with:
          components: llvm-tools-preview
-          cache: false
      - name: Install sqlx-cli
        run: cargo install sqlx-cli
          --version=${{ env.SQLX_VERSION }}
          --features ${{ env.SQLX_FEATURES }}
          --no-default-features
          --locked
+      - name: Create app user in Postgres
+        run: |
+          sudo apt-get install postgresql-client
+          # Create the application user
+          CREATE_QUERY="CREATE USER ${APP_USER} WITH PASSWORD '${APP_USER_PWD}';"
+          PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${CREATE_QUERY}"
+          # Grant create db privileges to the app user
+          GRANT_QUERY="ALTER USER ${APP_USER} CREATEDB;"
+          PGPASSWORD="password" psql -U "postgres" -h "localhost" -c "${GRANT_QUERY}"
      - name: Migrate database
-        run: cargo sqlx migrate run
+        run: SKIP_DOCKER=true ./scripts/init_db.sh
      - name: Install cargo-llvm-cov
        uses: taiki-e/install-action@cargo-llvm-cov
      - name: Generate code coverage
        run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
      - name: Generate report
        run: cargo llvm-cov report --html --output-dir coverage
+      - uses: actions/upload-artifact@v4
+        with:
+          name: "Coverage report"
+          path: coverage/

.gitignore vendored

@@ -1,6 +1,2 @@
/target /target
/node_modules /node_modules
.env
/.idea
docker-compose.yml


@@ -1,18 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO comments (user_id, comment_id, post_id, author, content)\n VALUES ($1, $2, $3, $4, $5)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Uuid",
"Uuid",
"Text",
"Text"
]
},
"nullable": []
},
"hash": "02fff619c0ff8cb4f9946991be0ce795385b9e6697dcaa52f915acdbb1460e65"
}


@@ -1,64 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.post_id, p.author_id, u.username AS author, u.full_name,\n p.title, p.content, p.published_at, last_modified\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n WHERE p.post_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
true
]
},
"hash": "059162eba48cf5f519d0d8b6ce63575ced91941b8c55c986b8c5591c7d9b09e4"
}


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM notifications_delivered WHERE opened = TRUE",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "06f07a7522f3ee8e2cdfe5a7988a46f9a2598aa9c0618d00f6287978d5ce28ca"
}


@@ -1,12 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM idempotency\n WHERE created_at < NOW() - INTERVAL '1 hour'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": []
},
"nullable": []
},
"hash": "1e1a90042e89bd8662df3bae15bc7506146cff102034664c77ab0fc68b9480f5"
}


@@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n SELECT\n response_status_code as \"response_status_code!\",\n response_headers as \"response_headers!: Vec<HeaderPairRecord>\",\n response_body as \"response_body!\"\n FROM idempotency\n WHERE idempotency_key = $1\n ", "query": "\n SELECT\n response_status_code as \"response_status_code!\",\n response_headers as \"response_headers!: Vec<HeaderPairRecord>\",\n response_body as \"response_body!\"\n FROM idempotency\n WHERE\n user_id = $1\n AND idempotency_key = $2\n ",
"describe": { "describe": {
"columns": [ "columns": [
{ {
@@ -44,6 +44,7 @@
], ],
"parameters": { "parameters": {
"Left": [ "Left": [
"Uuid",
"Text" "Text"
] ]
}, },
@@ -53,5 +54,5 @@
true true
] ]
}, },
"hash": "74d92b078198c3f73edc272c788249b14b62c59365d745d6a2e314cd9c5db1e9" "hash": "1fc498c8ccbf46f3e00b915e3b3973eb8d44a83a7df6dd7744dc56a2e94a0aa5"
} }


@@ -1,64 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.author_id, u.username as author, u.full_name,\n p.post_id, p.title, p.content, p.published_at, p.last_modified\n FROM posts p\n INNER JOIN users u ON p.author_id = u.user_id\n WHERE p.author_id = $1\n ORDER BY p.published_at DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
true,
false,
false,
false,
false,
true
]
},
"hash": "1fc92c14786c21d24951341e3a8149964533b7627d2d073eeac7b7d3230513ce"
}


@@ -1,44 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, password_hash, role as \"role: Role\"\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "password_hash",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
false
]
},
"hash": "22c9449522dcf495d9f49c16ca433aa07a0d1daae4884789ba1e36a918e7dfd1"
}


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO idempotency (idempotency_key, created_at)\n VALUES ($1, now())\n ON CONFLICT DO NOTHING\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text"
]
},
"nullable": []
},
"hash": "3124db53d9e1fe0701a2fc70eea98e001fef4b75c24d33d8dd595f6b483e8f65"
}


@@ -1,10 +1,11 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n UPDATE idempotency\n SET\n response_status_code = $2,\n response_headers = $3,\n response_body = $4\n WHERE idempotency_key = $1\n ", "query": "\n UPDATE idempotency\n SET\n response_status_code = $3,\n response_headers = $4,\n response_body = $5\n WHERE\n user_id = $1\n AND idempotency_key = $2\n ",
"describe": { "describe": {
"columns": [], "columns": [],
"parameters": { "parameters": {
"Left": [ "Left": [
"Uuid",
"Text", "Text",
"Int2", "Int2",
{ {
@@ -36,5 +37,5 @@
}, },
"nullable": [] "nullable": []
}, },
"hash": "b64d5c2e51f328effc8f4687066db96ad695c575fb66195febcdf95c1539a153" "hash": "32701e61ea14e25608b5f6b05289d08d422e9629d6aee98ac1dcbd50f1edbfe1"
} }


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM posts",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "3d7376ca79ffd159830fc6d43042d5fe761b6d330924bde7c5fc0f17f533def9"
}


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM notifications_delivered",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "3f4aceeab03c1c7352d6bed39d397e17d1fc934015d53754f9b0055c4701ee21"
}


@@ -0,0 +1,15 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO idempotency (user_id, idempotency_key, created_at)\n VALUES ($1, $2, now())\n ON CONFLICT DO NOTHING\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": []
},
"hash": "409cb2c83e34fba77b76f031cb0846a8f2716d775c3748887fb0c50f0e0a565b"
}


@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM subscriptions WHERE id = $1 RETURNING email",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "email",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "4141df8c45db179016d8e87b023b572bec7e04a6f3324aa17de7e7a9b1fb32ef"
}


@@ -1,25 +1,20 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n SELECT newsletter_issue_id, title, text_content, html_content\n FROM newsletter_issues\n WHERE newsletter_issue_id = $1\n ", "query": "\n SELECT title, text_content, html_content\n FROM newsletter_issues\n WHERE newsletter_issue_id = $1\n ",
"describe": { "describe": {
"columns": [ "columns": [
{ {
"ordinal": 0, "ordinal": 0,
"name": "newsletter_issue_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "title", "name": "title",
"type_info": "Text" "type_info": "Text"
}, },
{ {
"ordinal": 2, "ordinal": 1,
"name": "text_content", "name": "text_content",
"type_info": "Text" "type_info": "Text"
}, },
{ {
"ordinal": 3, "ordinal": 2,
"name": "html_content", "name": "html_content",
"type_info": "Text" "type_info": "Text"
} }
@@ -30,11 +25,10 @@
] ]
}, },
"nullable": [ "nullable": [
false,
false, false,
false, false,
false false
] ]
}, },
"hash": "f682b1791fb9871c5f7416711caf32637d6303b2c166ef89e7f725b309d2219f" "hash": "43116d4e670155129aa69a7563ddc3f7d01ef3689bb8de9ee1757b401ad95b46"
} }


@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO issue_delivery_queue (\n newsletter_issue_id,\n subscriber_email,\n unsubscribe_token,\n kind\n )\n SELECT $1, email, unsubscribe_token, $2\n FROM subscriptions\n WHERE status = 'confirmed'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text"
]
},
"nullable": []
},
"hash": "5d9039a01feaca50218a1c791439b2bd3817582798027c00d59d43089531ecc0"
}


@@ -1,62 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, role as \"role: Role\", full_name, bio, member_since\n FROM users\n WHERE user_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "bio",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "member_since",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
true,
true,
false
]
},
"hash": "601884180bc841dc0762008a819218620fc05169fe3bb80b7635fbe9e227056b"
}


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM subscriptions",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "68a00cae18e40dc76ffea61dfc0ea84d8cb09502b24c11dbb8d403419899dfd1"
}


@@ -1,6 +1,6 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "\n SELECT newsletter_issue_id, subscriber_email, unsubscribe_token, kind\n FROM issue_delivery_queue\n FOR UPDATE\n SKIP LOCKED\n LIMIT 1\n ", "query": "\n SELECT newsletter_issue_id, subscriber_email, unsubscribe_token\n FROM issue_delivery_queue\n FOR UPDATE\n SKIP LOCKED\n LIMIT 1\n ",
"describe": { "describe": {
"columns": [ "columns": [
{ {
@@ -17,22 +17,16 @@
"ordinal": 2, "ordinal": 2,
"name": "unsubscribe_token", "name": "unsubscribe_token",
"type_info": "Text" "type_info": "Text"
},
{
"ordinal": 3,
"name": "kind",
"type_info": "Text"
} }
], ],
"parameters": { "parameters": {
"Left": [] "Left": []
}, },
"nullable": [ "nullable": [
false,
false, false,
false, false,
false false
] ]
}, },
"hash": "3b79eca713fe7e167578537399436f5cb1171a7e89c398e005ad41ee12aaf91f" "hash": "6d21a0dd6ef2ea03ce82248ceceab76bb486237ff8e4a2ccd4dbf2b73c496048"
} }


@@ -1,60 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, role as \"role: Role\", full_name, bio, member_since\n FROM users\n ORDER BY member_since DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "bio",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "member_since",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": []
},
"nullable": [
false,
false,
false,
true,
true,
false
]
},
"hash": "73dbf3fb780272b1849cd8aa2ecfb59774b1c46bf52181b6298eebccbc86e438"
}


@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT login_time FROM user_logins\n WHERE user_id = $1\n ORDER BY login_time DESC\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "login_time",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "769e8762bd2173c088d85fc132326b05a08e67092eac4c3a7aff8a49d086b5a0"
}


@@ -1,12 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n DELETE FROM subscriptions\n WHERE status = 'pending_confirmation'\n AND subscribed_at < NOW() - INTERVAL '24 hours'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": []
},
"nullable": []
},
"hash": "7eccf0027753bc1c42897aef12c9350eca023f3be52e24530127d06c3c449104"
}


@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT author_id FROM posts WHERE post_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "author_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false
]
},
"hash": "84fcada696e1be5db55ef276e120ffef9adf7f5a4f5c4d5975b85e008e15620b"
}


@@ -1,27 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO users (user_id, username, password_hash, role)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text",
"Text",
{
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
]
},
"nullable": []
},
"hash": "878036fa48e738387e4140d5dc7eccba477794a267f2952aab684028b7c6e286"
}


@@ -1,59 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT c.user_id as \"user_id?\", u.username as \"username?\", c.comment_id, c.post_id, c.author, c.content, c.published_at\n FROM comments c\n LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL\n ORDER BY published_at DESC\n LIMIT $1\n OFFSET $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id?",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username?",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "comment_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
true,
false,
false,
false,
true,
false,
false
]
},
"hash": "886de678764ebf7f96fe683d3b685d176f0a41043c7ade8b659a9bd167a2d063"
}


@@ -1,17 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE users\n SET username = $1, full_name = $2, bio = $3\n WHERE user_id = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Text",
"Uuid"
]
},
"nullable": []
},
"hash": "8dc27ae224c7ae3c99c396302357514d66e843dc4b3ee4ab58c628b6c9797fdd"
}


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM subscriptions WHERE status = 'confirmed'",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "95a6533f617e7bae589b00548c73425b2991237b8c823dd7c863e6dad002d4b6"
}


@@ -1,37 +1,36 @@
{ {
"db_name": "PostgreSQL", "db_name": "PostgreSQL",
"query": "SELECT * FROM subscriptions ORDER BY subscribed_at DESC LIMIT $1 OFFSET $2", "query": "\n SELECT p.post_id, u.username AS author, p.title, p.content, p.published_at\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n ORDER BY p.published_at DESC\n LIMIT $1\n ",
"describe": { "describe": {
"columns": [ "columns": [
{ {
"ordinal": 0, "ordinal": 0,
"name": "id", "name": "post_id",
"type_info": "Uuid" "type_info": "Uuid"
}, },
{ {
"ordinal": 1, "ordinal": 1,
"name": "email", "name": "author",
"type_info": "Text" "type_info": "Text"
}, },
{ {
"ordinal": 2, "ordinal": 2,
"name": "subscribed_at", "name": "title",
"type_info": "Timestamptz" "type_info": "Text"
}, },
{ {
"ordinal": 3, "ordinal": 3,
"name": "status", "name": "content",
"type_info": "Text" "type_info": "Text"
}, },
{ {
"ordinal": 4, "ordinal": 4,
"name": "unsubscribe_token", "name": "published_at",
"type_info": "Text" "type_info": "Timestamptz"
} }
], ],
"parameters": { "parameters": {
"Left": [ "Left": [
"Int8",
"Int8" "Int8"
] ]
}, },
@@ -40,8 +39,8 @@
false, false,
false, false,
false, false,
true false
] ]
}, },
"hash": "a6cb227efa5ac12189e662d68b8dcc39032f308f211f603dfcf539b7b071b8e3" "hash": "9ba5df2593c5dc21de727c16f03a76e4922b940c0877132cd5f622c725b9b123"
} }


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "UPDATE notifications_delivered SET opened = TRUE WHERE email_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "9fc831553927814e21dd2aa4ff92d06c32e318c7536918d5adbaf5eaf5777e3d"
}


@@ -0,0 +1,28 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, password_hash\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "password_hash",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false
]
},
"hash": "acf1b96c82ddf18db02e71a0e297c822b46f10add52c54649cf599b883165e58"
}


@@ -1,17 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n UPDATE posts\n SET title = $1, content = $2, last_modified = $3 WHERE post_id = $4\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Text",
"Text",
"Timestamptz",
"Uuid"
]
},
"nullable": []
},
"hash": "aef1e780d14be61aa66ae8771309751741068694b291499ee1371de693c6a654"
}


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM posts WHERE post_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "b47161386b21432693aa3827963e8167c942e395687cd5ffecb7c064ca2dde70"
}


@@ -0,0 +1,46 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.post_id, u.username AS author, p.title, p.content, p.published_at\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n WHERE p.post_id = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "published_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
false,
false,
false,
false
]
},
"hash": "bccf441e3c1c29ddf6f7f13f7a333adf733abc527da03b12c91422b9b20f3a6f"
}


@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM comments WHERE post_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
null
]
},
"hash": "bd08bf95dc1c8c0c7678bc509df7ce776e839846f29981e2e0bdfd382de9370f"
}


@@ -1,28 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT username, full_name FROM users WHERE user_id = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 1,
"name": "full_name",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": [
false,
true
]
},
"hash": "bfd02c92fb5e0c8748b172bf59a77a477b432ada1f41090571f4fe0e685b1b1b"
}


@@ -0,0 +1,14 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO issue_delivery_queue (\n newsletter_issue_id,\n subscriber_email,\n unsubscribe_token\n )\n SELECT $1, email, unsubscribe_token\n FROM subscriptions\n WHERE status = 'confirmed'\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "ca8fe28bbf395e1c62a495f7299d404043b35f44f639b0edde61ed9e1a7f2944"
}


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM comments WHERE comment_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "caf9f2603db6bc8b715cad188501c12f5de5fae49cd04271471f1337a3232f58"
}


@@ -1,65 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT p.post_id, p.author_id, u.username AS author, u.full_name,\n p.title, p.content, p.published_at, p.last_modified\n FROM posts p\n LEFT JOIN users u ON p.author_id = u.user_id\n ORDER BY p.published_at DESC\n LIMIT $1\n OFFSET $2\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "author_id",
"type_info": "Uuid"
},
{
"ordinal": 2,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 4,
"name": "title",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
},
{
"ordinal": 7,
"name": "last_modified",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Int8",
"Int8"
]
},
"nullable": [
false,
false,
false,
true,
false,
false,
false,
true
]
},
"hash": "dc3c1b786b4f4bd65f625922ce05eab4cb161f3de6c6e676af778f7749af5710"
}


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "DELETE FROM users WHERE user_id = $1",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "dfa520877c017cd5808d02c24ef2d71938b68093974f335a4d89df91874fdaa2"
}


@@ -1,62 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT user_id, username, full_name, role as \"role: Role\", member_since, bio\n FROM users\n WHERE username = $1\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "full_name",
"type_info": "Text"
},
{
"ordinal": 3,
"name": "role: Role",
"type_info": {
"Custom": {
"name": "user_role",
"kind": {
"Enum": [
"admin",
"writer"
]
}
}
}
},
{
"ordinal": 4,
"name": "member_since",
"type_info": "Timestamptz"
},
{
"ordinal": 5,
"name": "bio",
"type_info": "Text"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false,
false,
true,
false,
false,
true
]
},
"hash": "e049f4db1020c0a2979d5ee3c1c0519de59eee8594eb2e472877e5db6bf25271"
}


@@ -1,20 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT count(*) FROM comments",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "count",
"type_info": "Int8"
}
],
"parameters": {
"Left": []
},
"nullable": [
null
]
},
"hash": "e056c3230c1ccd1b3b62e902f49a41f21213e0f7da92b428065986d380676034"
}


@@ -1,22 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "SELECT user_id FROM users WHERE username = $1",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id",
"type_info": "Uuid"
}
],
"parameters": {
"Left": [
"Text"
]
},
"nullable": [
false
]
},
"hash": "f4ea2ad9ba4f26093152e4a0e008ef6c3114fbe9e51301611c5633e1cc944c05"
}


@@ -1,15 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO notifications_delivered (email_id, newsletter_issue_id)\n VALUES ($1, $2)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Uuid"
]
},
"nullable": []
},
"hash": "f8afa9b469bf8c216c5855e1d6b7ee05281c9e7779f8fd6486780f882f46e385"
}


@@ -1,60 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "\n SELECT c.user_id as \"user_id?\", u.username as \"username?\", c.comment_id, c.post_id, c.author, c.content, c.published_at\n FROM comments c\n LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL\n WHERE c.post_id = $1\n ORDER BY c.published_at DESC\n LIMIT $2\n OFFSET $3\n ",
"describe": {
"columns": [
{
"ordinal": 0,
"name": "user_id?",
"type_info": "Uuid"
},
{
"ordinal": 1,
"name": "username?",
"type_info": "Text"
},
{
"ordinal": 2,
"name": "comment_id",
"type_info": "Uuid"
},
{
"ordinal": 3,
"name": "post_id",
"type_info": "Uuid"
},
{
"ordinal": 4,
"name": "author",
"type_info": "Text"
},
{
"ordinal": 5,
"name": "content",
"type_info": "Text"
},
{
"ordinal": 6,
"name": "published_at",
"type_info": "Timestamptz"
}
],
"parameters": {
"Left": [
"Uuid",
"Int8",
"Int8"
]
},
"nullable": [
true,
false,
false,
false,
true,
false,
false
]
},
"hash": "fb280849a8a1fce21ec52cd9df73492d965357c9a410eb3b43b1a2e1cc8a0259"
}


@@ -1,14 +0,0 @@
{
"db_name": "PostgreSQL",
"query": "INSERT INTO user_logins (user_id) VALUES ($1)",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid"
]
},
"nullable": []
},
"hash": "fc383671ada951baa611ab7dd00efcc7f4f2aea7c22e4c0865e5c766ed7f99b3"
}

Cargo.lock generated

@@ -17,6 +17,19 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa" checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"getrandom 0.3.3",
"once_cell",
"version_check",
"zerocopy",
]
[[package]] [[package]]
name = "aho-corasick" name = "aho-corasick"
version = "1.1.3" version = "1.1.3"
@@ -227,6 +240,28 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "axum-server"
version = "0.7.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "495c05f60d6df0093e8fb6e74aa5846a0ad06abaf96d76166283720bf740f8ab"
dependencies = [
"arc-swap",
"bytes",
"fs-err",
"http",
"http-body",
"hyper",
"hyper-util",
"pin-project-lite",
"rustls",
"rustls-pemfile",
"rustls-pki-types",
"tokio",
"tokio-rustls",
"tower-service",
]
[[package]] [[package]]
name = "backtrace" name = "backtrace"
version = "0.3.75" version = "0.3.75"
@@ -529,29 +564,6 @@ dependencies = [
"typenum", "typenum",
] ]
[[package]]
name = "cssparser"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e901edd733a1472f944a45116df3f846f54d37e67e68640ac8bb69689aca2aa"
dependencies = [
"cssparser-macros",
"dtoa-short",
"itoa",
"phf",
"smallvec",
]
[[package]]
name = "cssparser-macros"
version = "0.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "13b588ba4ac1a99f7f2964d24b3d896ddc6bf847ee3855dbd4366f058cfcd331"
dependencies = [
"quote",
"syn",
]
[[package]] [[package]]
name = "darling" name = "darling"
version = "0.20.11" version = "0.20.11"
@@ -626,26 +638,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "derive_more"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "deunicode" name = "deunicode"
version = "1.6.2" version = "1.6.2"
@@ -699,27 +691,6 @@ version = "0.15.7"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b"
[[package]]
name = "dtoa"
version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6add3b8cff394282be81f3fc1a0605db594ed69890078ca6e2cab1c408bcf04"
[[package]]
name = "dtoa-short"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd1511a7b6a56299bd043a9c167a6d2bfb37bf84a6dfceaba651168adfb43c87"
dependencies = [
"dtoa",
]
[[package]]
name = "ego-tree"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2972feb8dffe7bc8c5463b1dacda1b0dfbed3710e50f977d965429692d74cd8"
[[package]] [[package]]
name = "either" name = "either"
version = "1.15.0" version = "1.15.0"
@@ -876,13 +847,13 @@ dependencies = [
] ]
[[package]] [[package]]
name = "futf" name = "fs-err"
version = "0.1.5" version = "3.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "df420e2e84819663797d1ec6544b13c5be84629e7bb00dc960d6917db2987843" checksum = "88d7be93788013f265201256d58f04936a8079ad5dc898743aa20525f503b683"
dependencies = [ dependencies = [
"mac", "autocfg",
"new_debug_unreachable", "tokio",
] ]
[[package]] [[package]]
@@ -985,15 +956,6 @@ dependencies = [
"slab", "slab",
] ]
[[package]]
name = "fxhash"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c"
dependencies = [
"byteorder",
]
[[package]] [[package]]
name = "generic-array" name = "generic-array"
version = "0.14.7" version = "0.14.7"
@@ -1005,12 +967,13 @@ dependencies = [
] ]
[[package]] [[package]]
name = "getopts" name = "gethostname"
version = "0.2.24" version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cfe4fbac503b8d1f88e6676011885f34b7174f46e59956bba534ba83abded4df" checksum = "c1ebd34e35c46e00bb73e81363248d627782724609fe1b6396f553f68fe3862e"
dependencies = [ dependencies = [
"unicode-width", "libc",
"winapi",
] ]
[[package]] [[package]]
@@ -1136,17 +1099,6 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "html5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55d958c2f74b664487a2035fe1dadb032c48718a03b63f3ab0b8537db8549ed4"
dependencies = [
"log",
"markup5ever",
"match_token",
]
[[package]] [[package]]
name = "http" name = "http"
version = "1.3.1" version = "1.3.1"
@@ -1550,12 +1502,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154" checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "mac"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]] [[package]]
name = "markdown" name = "markdown"
version = "1.0.0" version = "1.0.0"
@@ -1565,28 +1511,6 @@ dependencies = [
"unicode-id", "unicode-id",
] ]
[[package]]
name = "markup5ever"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "311fe69c934650f8f19652b3946075f0fc41ad8757dbb68f1ca14e7900ecc1c3"
dependencies = [
"log",
"tendril",
"web_atoms",
]
[[package]]
name = "match_token"
version = "0.35.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ac84fd3f360fcc43dc5f5d186f02a94192761a080e8bc58621ad4d12296a58cf"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]] [[package]]
name = "matchers" name = "matchers"
version = "0.1.0" version = "0.1.0"
@@ -1660,12 +1584,6 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "new_debug_unreachable"
version = "1.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "650eef8c711430f1a879fdd01d4745a7deea475becfb90269c06775983bbf086"
[[package]] [[package]]
name = "nom" name = "nom"
version = "7.1.3" version = "7.1.3"
@@ -1900,58 +1818,6 @@ dependencies = [
"sha2", "sha2",
] ]
[[package]]
name = "phf"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fd6780a80ae0c52cc120a26a1a42c1ae51b247a253e4e06113d23d2c2edd078"
dependencies = [
"phf_macros",
"phf_shared",
]
[[package]]
name = "phf_codegen"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "aef8048c789fa5e851558d709946d6d79a8ff88c0440c587967f8e94bfb1216a"
dependencies = [
"phf_generator",
"phf_shared",
]
[[package]]
name = "phf_generator"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c80231409c20246a13fddb31776fb942c38553c51e871f8cbd687a4cfb5843d"
dependencies = [
"phf_shared",
"rand 0.8.5",
]
[[package]]
name = "phf_macros"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f84ac04429c13a7ff43785d75ad27569f2951ce0ffd30a3321230db2fc727216"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "phf_shared"
version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67eabc2ef2a60eb7faa00097bd1ffdb5bd28e62bf39990626a582201b7a754e5"
dependencies = [
"siphasher",
]
[[package]] [[package]]
name = "pin-project-lite" name = "pin-project-lite"
version = "0.2.16" version = "0.2.16"
@@ -2015,12 +1881,6 @@ dependencies = [
"zerocopy", "zerocopy",
] ]
[[package]]
name = "precomputed-hash"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "925383efa346730478fb4838dbe9137d2a47675ad789c546d150a6e1dd4ab31c"
[[package]] [[package]]
name = "proc-macro-error-attr2" name = "proc-macro-error-attr2"
version = "2.0.0" version = "2.0.0"
@@ -2092,9 +1952,9 @@ dependencies = [
[[package]] [[package]]
name = "quinn" name = "quinn"
version = "0.11.9" version = "0.11.8"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20" checksum = "626214629cda6781b6dc1d316ba307189c85ba657213ce642d9c77670f8202c8"
dependencies = [ dependencies = [
"bytes", "bytes",
"cfg_aliases", "cfg_aliases",
@@ -2103,7 +1963,7 @@ dependencies = [
"quinn-udp", "quinn-udp",
"rustc-hash", "rustc-hash",
"rustls", "rustls",
"socket2 0.6.0", "socket2 0.5.10",
"thiserror", "thiserror",
"tokio", "tokio",
"tracing", "tracing",
@@ -2112,9 +1972,9 @@ dependencies = [
[[package]] [[package]]
name = "quinn-proto" name = "quinn-proto"
version = "0.11.13" version = "0.11.12"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31" checksum = "49df843a9161c85bb8aae55f101bc0bac8bcafd637a620d9122fd7e0b2f7422e"
dependencies = [ dependencies = [
"bytes", "bytes",
"getrandom 0.3.3", "getrandom 0.3.3",
@@ -2133,14 +1993,14 @@ dependencies = [
[[package]] [[package]]
name = "quinn-udp" name = "quinn-udp"
version = "0.5.14" version = "0.5.13"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd" checksum = "fcebb1209ee276352ef14ff8732e24cc2b02bbac986cd74a4c81bcb2f9881970"
dependencies = [ dependencies = [
"cfg_aliases", "cfg_aliases",
"libc", "libc",
"once_cell", "once_cell",
"socket2 0.6.0", "socket2 0.5.10",
"tracing", "tracing",
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
@@ -2431,6 +2291,15 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "rustls-pemfile"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50"
dependencies = [
"rustls-pki-types",
]
[[package]] [[package]]
name = "rustls-pki-types" name = "rustls-pki-types"
version = "1.12.0" version = "1.12.0"
@@ -2470,21 +2339,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
[[package]]
name = "scraper"
version = "0.24.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5f3a24d916e78954af99281a455168d4a9515d65eca99a18da1b813689c4ad9"
dependencies = [
"cssparser",
"ego-tree",
"getopts",
"html5ever",
"precomputed-hash",
"selectors",
"tendril",
]
[[package]] [[package]]
name = "secrecy" name = "secrecy"
version = "0.10.3" version = "0.10.3"
@@ -2495,25 +2349,6 @@ dependencies = [
"zeroize", "zeroize",
] ]
[[package]]
name = "selectors"
version = "0.31.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5685b6ae43bfcf7d2e7dfcfb5d8e8f61b46442c902531e41a32a9a8bf0ee0fb6"
dependencies = [
"bitflags",
"cssparser",
"derive_more",
"fxhash",
"log",
"new_debug_unreachable",
"phf",
"phf_codegen",
"precomputed-hash",
"servo_arc",
"smallvec",
]
[[package]] [[package]]
name = "semver" name = "semver"
version = "1.0.26" version = "1.0.26"
@@ -2616,15 +2451,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "servo_arc"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "204ea332803bd95a0b60388590d59cf6468ec9becf626e2451f1d26a1d972de4"
dependencies = [
"stable_deref_trait",
]
[[package]] [[package]]
name = "sha1" name = "sha1"
version = "0.10.6" version = "0.10.6"
@@ -2672,12 +2498,6 @@ dependencies = [
"rand_core 0.6.4", "rand_core 0.6.4",
] ]
[[package]]
name = "siphasher"
version = "1.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56199f7ddabf13fe5074ce809e7d3f42b42ae711800501b5b16ea82ad029c39d"
[[package]] [[package]]
name = "slab" name = "slab"
version = "0.4.11" version = "0.4.11"
@@ -2936,31 +2756,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3"
[[package]]
name = "string_cache"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf776ba3fa74f83bf4b63c3dcbbf82173db2632ed8452cb2d891d33f459de70f"
dependencies = [
"new_debug_unreachable",
"parking_lot",
"phf_shared",
"precomputed-hash",
"serde",
]
[[package]]
name = "string_cache_codegen"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c711928715f1fe0fe509c53b43e993a9a557babc2d0a3567d0a3006f1ac931a0"
dependencies = [
"phf_generator",
"phf_shared",
"proc-macro2",
"quote",
]
[[package]] [[package]]
name = "stringprep" name = "stringprep"
version = "0.1.5" version = "0.1.5"
@@ -3015,17 +2810,6 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "tendril"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d24a120c5fc464a3458240ee02c299ebcb9d67b5249c8848b09d639dca8d7bb0"
dependencies = [
"futf",
"mac",
"utf-8",
]
[[package]] [[package]]
name = "thiserror" name = "thiserror"
version = "2.0.16" version = "2.0.16"
@@ -3151,9 +2935,9 @@ dependencies = [
[[package]] [[package]]
name = "tokio-rustls" name = "tokio-rustls"
version = "0.26.3" version = "0.26.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "05f63835928ca123f1bef57abbcd23bb2ba0ac9ae1235f1e65bda0d06e7786bd" checksum = "8e727b36a1a0e8b74c376ac2211e40c2c8af09fb4013c60d910495810f008e9b"
dependencies = [ dependencies = [
"rustls", "rustls",
"tokio", "tokio",
@@ -3374,6 +3158,24 @@ dependencies = [
"syn", "syn",
] ]
[[package]]
name = "tracing-bunyan-formatter"
version = "0.3.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d637245a0d8774bd48df6482e086c59a8b5348a910c3b0579354045a9d82411"
dependencies = [
"ahash",
"gethostname",
"log",
"serde",
"serde_json",
"time",
"tracing",
"tracing-core",
"tracing-log 0.1.4",
"tracing-subscriber",
]
[[package]] [[package]]
name = "tracing-core" name = "tracing-core"
version = "0.1.34" version = "0.1.34"
@@ -3384,6 +3186,17 @@ dependencies = [
"valuable", "valuable",
] ]
[[package]]
name = "tracing-log"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2"
dependencies = [
"log",
"once_cell",
"tracing-core",
]
[[package]] [[package]]
name = "tracing-log" name = "tracing-log"
version = "0.2.0" version = "0.2.0"
@@ -3410,7 +3223,7 @@ dependencies = [
"thread_local", "thread_local",
"tracing", "tracing",
"tracing-core", "tracing-core",
"tracing-log", "tracing-log 0.2.0",
] ]
[[package]] [[package]]
@@ -3488,12 +3301,6 @@ version = "1.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493" checksum = "f6ccf251212114b54433ec949fd6a7841275f9ada20dddd2f29e9ceea4501493"
[[package]]
name = "unicode-width"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a1a07cc7db3810833284e8d372ccdc6da29741639ecc70c9ec107df0fa6154c"
[[package]] [[package]]
name = "untrusted" name = "untrusted"
version = "0.9.0" version = "0.9.0"
@@ -3517,12 +3324,6 @@ version = "2.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da" checksum = "daf8dba3b7eb870caf1ddeed7bc9d2a049f3cfdfae7cb521b087cc33ae4c49da"
[[package]]
name = "utf-8"
version = "0.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9"
[[package]] [[package]]
name = "utf8_iter" name = "utf8_iter"
version = "1.0.4" version = "1.0.4"
@@ -3710,18 +3511,6 @@ dependencies = [
"wasm-bindgen", "wasm-bindgen",
] ]
[[package]]
name = "web_atoms"
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57ffde1dc01240bdf9992e3205668b235e59421fd085e8a317ed98da0178d414"
dependencies = [
"phf",
"phf_codegen",
"string_cache",
"string_cache_codegen",
]
[[package]] [[package]]
name = "webpki-roots" name = "webpki-roots"
version = "0.26.11" version = "0.26.11"
@@ -4070,6 +3859,8 @@ dependencies = [
"argon2", "argon2",
"askama", "askama",
"axum", "axum",
"axum-server",
"base64 0.22.1",
"chrono", "chrono",
"claims", "claims",
"config", "config",
@@ -4081,11 +3872,11 @@ dependencies = [
"quickcheck_macros", "quickcheck_macros",
"rand 0.9.2", "rand 0.9.2",
"reqwest", "reqwest",
"scraper",
"secrecy", "secrecy",
"serde", "serde",
"serde-aux", "serde-aux",
"serde_json", "serde_json",
"serde_urlencoded",
"sqlx", "sqlx",
"thiserror", "thiserror",
"tokio", "tokio",
@@ -4093,7 +3884,10 @@ dependencies = [
"tower-sessions", "tower-sessions",
"tower-sessions-redis-store", "tower-sessions-redis-store",
"tracing", "tracing",
"tracing-bunyan-formatter",
"tracing-subscriber", "tracing-subscriber",
"unicode-segmentation",
"urlencoding",
"uuid", "uuid",
"validator", "validator",
"wiremock", "wiremock",


@@ -11,31 +11,21 @@ path = "src/lib.rs"
path = "src/main.rs" path = "src/main.rs"
name = "zero2prod" name = "zero2prod"
[profile.release]
opt-level = 'z'
lto = true
codegen-units = 1
panic = 'abort'
strip = true
rpath = false
debug = false
debug-assertions = false
overflow-checks = false
incremental = false
[dependencies] [dependencies]
anyhow = "1.0.99" anyhow = "1.0.99"
argon2 = { version = "0.5.3", features = ["std"] } argon2 = { version = "0.5.3", features = ["std"] }
askama = "0.14.0" askama = "0.14.0"
axum = { version = "0.8.4", features = ["macros"] } axum = { version = "0.8.4", features = ["macros"] }
axum-server = { version = "0.7.2", features = ["tls-rustls-no-provider"] }
base64 = "0.22.1"
chrono = { version = "0.4.41", default-features = false, features = ["clock"] } chrono = { version = "0.4.41", default-features = false, features = ["clock"] }
config = "0.15.14" config = "0.15.14"
markdown = "1.0.0" markdown = "1.0.0"
rand = { version = "0.9.2", features = ["std_rng"] } rand = { version = "0.9.2", features = ["std_rng"] }
reqwest = { version = "0.12.23", default-features = false, features = [ reqwest = { version = "0.12.23", default-features = false, features = [
"cookies",
"json",
"rustls-tls", "rustls-tls",
"json",
"cookies",
] } ] }
secrecy = { version = "0.10.3", features = ["serde"] } secrecy = { version = "0.10.3", features = ["serde"] }
serde = { version = "1.0.219", features = ["derive"] } serde = { version = "1.0.219", features = ["derive"] }
@@ -54,7 +44,10 @@ tower-http = { version = "0.6.6", features = ["fs", "trace"] }
tower-sessions = "0.14.0" tower-sessions = "0.14.0"
tower-sessions-redis-store = "0.16.0" tower-sessions-redis-store = "0.16.0"
tracing = "0.1.41" tracing = "0.1.41"
tracing-bunyan-formatter = "0.3.10"
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
unicode-segmentation = "1.12.0"
urlencoding = "2.1.3"
uuid = { version = "1.18.0", features = ["v4", "serde"] } uuid = { version = "1.18.0", features = ["v4", "serde"] }
validator = { version = "0.20.0", features = ["derive"] } validator = { version = "0.20.0", features = ["derive"] }
@@ -65,6 +58,6 @@ linkify = "0.10.0"
once_cell = "1.21.3" once_cell = "1.21.3"
quickcheck = "1.0.3" quickcheck = "1.0.3"
quickcheck_macros = "1.1.0" quickcheck_macros = "1.1.0"
scraper = "0.24.0"
serde_json = "1.0.143" serde_json = "1.0.143"
serde_urlencoded = "0.7.1"
wiremock = "0.6.4" wiremock = "0.6.4"


@@ -1,6 +1,5 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.90.0 AS chef FROM lukemathwalker/cargo-chef:latest-rust-1.89.0 AS chef
WORKDIR /app WORKDIR /app
RUN apt update && apt install -y nodejs npm clang mold && rm -rf /var/lib/apt/lists/*
FROM chef AS planner FROM chef AS planner
COPY . . COPY . .
@@ -8,17 +7,21 @@ RUN cargo chef prepare --recipe-path recipe.json
FROM chef AS builder FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json COPY --from=planner /app/recipe.json recipe.json
RUN apt update -y \
&& apt install -y --no-install-recommends clang mold
RUN cargo chef cook --release --recipe-path recipe.json RUN cargo chef cook --release --recipe-path recipe.json
COPY . . COPY . .
ENV SQLX_OFFLINE=true ENV SQLX_OFFLINE=true
ENV RUSTFLAGS="-C strip=symbols"
RUN cargo build --release --bin zero2prod RUN cargo build --release --bin zero2prod
RUN npm install && npm run build-css
FROM gcr.io/distroless/cc-debian12 AS runtime FROM debian:bookworm-slim AS runtime
WORKDIR /app WORKDIR /app
RUN apt update -y \
&& apt install -y --no-install-recommends openssl ca-certificates \
&& apt autoremove -y \
&& apt clean -y \
&& rm -rf /var/lib/apt/lists/*
COPY --from=builder /app/target/release/zero2prod zero2prod COPY --from=builder /app/target/release/zero2prod zero2prod
COPY --from=builder /app/assets assets COPY configuration configuration
COPY --from=builder /app/configuration configuration
ENV APP_ENVIRONMENT=production ENV APP_ENVIRONMENT=production
ENTRYPOINT [ "./zero2prod" ] ENTRYPOINT [ "./zero2prod" ]

File diff suppressed because one or more lines are too long

Binary file not shown (before: 874 B).


@@ -2,16 +2,15 @@ application:
port: 8080 port: 8080
host: "127.0.0.1" host: "127.0.0.1"
base_url: "http://127.0.0.1:8080" base_url: "http://127.0.0.1:8080"
email_client:
authorization_token: "secret-token"
database: database:
host: "127.0.0.1" host: "127.0.0.1"
port: 5432 port: 5432
database_name: "newsletter" database_name: "newsletter"
username: "postgres" username: "postgres"
password: "password" password: "Jq09NF6Y8ZXJS4jd9c8U"
require_ssl: false require_ssl: false
timeout_milliseconds: 1000 timeout_millis: 1000
kv_store: email_client:
host: "127.0.0.1" authorization_token: "secret-token"
port: 6379 redis_uri: "redis://127.0.0.1:6379"
require_tls: false


@@ -1,4 +1,4 @@
application: application:
host: "0.0.0.0" host: "0.0.0.0"
database: database:
timeout_milliseconds: 500 timeout_millis: 500


@@ -1 +0,0 @@
ALTER TABLE issue_delivery_queue ADD COLUMN kind TEXT NOT NULL;


@@ -1,7 +0,0 @@
CREATE TABLE notifications_delivered (
email_id UUID PRIMARY KEY,
newsletter_issue_id UUID NOT NULL
REFERENCES newsletter_issues (newsletter_issue_id),
delivered_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
opened BOOLEAN NOT NULL DEFAULT FALSE
);


@@ -1,7 +0,0 @@
CREATE TYPE user_role AS ENUM ('admin', 'writer');
ALTER TABLE users ADD COLUMN role user_role;
UPDATE users SET role = 'admin' WHERE role IS NULL;
ALTER TABLE users ALTER COLUMN role SET NOT NULL;


@@ -1,4 +0,0 @@
ALTER TABLE users
ADD COLUMN full_name TEXT,
ADD COLUMN bio TEXT,
ADD COLUMN member_since TIMESTAMPTZ NOT NULL DEFAULT NOW();


@@ -1,7 +0,0 @@
CREATE TABLE comments (
comment_id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
post_id UUID NOT NULL REFERENCES posts (post_id) ON DELETE CASCADE,
author TEXT,
content TEXT NOT NULL,
published_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);


@@ -1,11 +0,0 @@
ALTER TABLE idempotency
DROP CONSTRAINT idempotency_user_id_fkey;
ALTER TABLE idempotency
DROP CONSTRAINT idempotency_pkey;
ALTER TABLE idempotency
ADD PRIMARY KEY (idempotency_key);
ALTER TABLE idempotency
DROP COLUMN user_id;


@@ -1,3 +0,0 @@
ALTER TABLE comments
ADD COLUMN user_id UUID
REFERENCES users (user_id) ON DELETE SET NULL;


@@ -1,2 +0,0 @@
ALTER TABLE posts
ADD COLUMN last_modified TIMESTAMPTZ;


@@ -1,5 +0,0 @@
CREATE TABLE user_logins (
id BIGSERIAL PRIMARY KEY,
user_id UUID NOT NULL REFERENCES users (user_id) ON DELETE CASCADE,
login_time TIMESTAMPTZ NOT NULL DEFAULT NOW()
);

package-lock.json (generated) — 3 changed lines

@@ -1123,8 +1123,7 @@
"version": "4.1.13", "version": "4.1.13",
"resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.13.tgz", "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-4.1.13.tgz",
"integrity": "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w==", "integrity": "sha512-i+zidfmTqtwquj4hMEwdjshYYgMbOrPzb9a0M3ZgNa0JMoZeFC6bxZvO8yr8ozS6ix2SDz0+mvryPeBs2TFE+w==",
"license": "MIT", "license": "MIT"
"peer": true
}, },
"node_modules/tapable": { "node_modules/tapable": {
"version": "2.2.3", "version": "2.2.3",


@@ -1,7 +1,6 @@
{ {
"scripts": { "scripts": {
"build-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --minify", "build-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --minify --watch"
"watch-css": "tailwindcss -i ./templates/input.css -o ./assets/css/main.css --watch"
}, },
"dependencies": { "dependencies": {
"@tailwindcss/cli": "^4.1.13", "@tailwindcss/cli": "^4.1.13",


@@ -14,7 +14,7 @@ if ! [ -x "$(command -v sqlx)" ]; then
fi fi
DB_USER="${POSTGRES_USER:=postgres}" DB_USER="${POSTGRES_USER:=postgres}"
DB_PASSWORD="${POSTGRES_PASSWORD:=password}" DB_PASSWORD="${POSTGRES_PASSWORD:=Jq09NF6Y8ZXJS4jd9c8U}"
DB_NAME="${POSTGRES_DB:=newsletter}" DB_NAME="${POSTGRES_DB:=newsletter}"
DB_PORT="${POSTGRES_PORT:=5432}" DB_PORT="${POSTGRES_PORT:=5432}"
DB_HOST="${POSTGRES_HOST:=localhost}" DB_HOST="${POSTGRES_HOST:=localhost}"


@@ -1,12 +1,14 @@
use crate::telemetry::spawn_blocking_with_tracing; use crate::{
routes::AdminError, session_state::TypedSession, telemetry::spawn_blocking_with_tracing,
};
use anyhow::Context; use anyhow::Context;
use argon2::{ use argon2::{
Algorithm, Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier, Version, Algorithm, Argon2, Params, PasswordHash, PasswordHasher, PasswordVerifier, Version,
password_hash::{SaltString, rand_core::OsRng}, password_hash::{SaltString, rand_core::OsRng},
}; };
use axum::{extract::Request, middleware::Next, response::Response};
use secrecy::{ExposeSecret, SecretString}; use secrecy::{ExposeSecret, SecretString};
use sqlx::PgPool; use sqlx::PgPool;
use std::fmt::Display;
use uuid::Uuid; use uuid::Uuid;
pub struct Credentials { pub struct Credentials {
@@ -42,7 +44,7 @@ pub async fn change_password(
Ok(()) Ok(())
} }
pub(crate) fn compute_pasword_hash(password: SecretString) -> Result<SecretString, anyhow::Error> { fn compute_pasword_hash(password: SecretString) -> Result<SecretString, anyhow::Error> {
let salt = SaltString::generate(&mut OsRng); let salt = SaltString::generate(&mut OsRng);
let password_hash = Argon2::new( let password_hash = Argon2::new(
Algorithm::Argon2id, Algorithm::Argon2id,
@@ -54,13 +56,15 @@ pub(crate) fn compute_pasword_hash(password: SecretString) -> Result<SecretStrin
Ok(SecretString::from(password_hash)) Ok(SecretString::from(password_hash))
} }
#[tracing::instrument(name = "Validate credentials", skip_all)] #[tracing::instrument(
name = "Validate credentials",
skip(username, password, connection_pool)
)]
pub async fn validate_credentials( pub async fn validate_credentials(
Credentials { username, password }: Credentials, Credentials { username, password }: Credentials,
connection_pool: &PgPool, connection_pool: &PgPool,
) -> Result<(Uuid, Role), AuthError> { ) -> Result<Uuid, AuthError> {
let mut user_id = None; let mut user_id = None;
let mut role = None;
let mut expected_password_hash = SecretString::from( let mut expected_password_hash = SecretString::from(
"$argon2id$v=19$m=15000,t=2,p=1$\ "$argon2id$v=19$m=15000,t=2,p=1$\
gZiV/M1gPc22ElAH/Jh1Hw$\ gZiV/M1gPc22ElAH/Jh1Hw$\
@@ -68,14 +72,13 @@ CWOrkoo7oJBQ/iyh7uJ0LO2aLEfrHwTWllSAxT0zRno"
.to_string(), .to_string(),
); );
if let Some((stored_user_id, stored_expected_password_hash, stored_role)) = if let Some((stored_user_id, stored_expected_password_hash)) =
get_stored_credentials(&username, connection_pool) get_stored_credentials(&username, connection_pool)
.await .await
.context("Failed to retrieve credentials from database.") .context("Failed to retrieve credentials from database.")
.map_err(AuthError::UnexpectedError)? .map_err(AuthError::UnexpectedError)?
{ {
user_id = Some(stored_user_id); user_id = Some(stored_user_id);
role = Some(stored_role);
expected_password_hash = stored_expected_password_hash; expected_password_hash = stored_expected_password_hash;
} }
@@ -86,19 +89,18 @@ CWOrkoo7oJBQ/iyh7uJ0LO2aLEfrHwTWllSAxT0zRno"
.ok_or_else(|| anyhow::anyhow!("Unknown username.")) .ok_or_else(|| anyhow::anyhow!("Unknown username."))
.map_err(AuthError::InvalidCredentials)?; .map_err(AuthError::InvalidCredentials)?;
let role = role
.ok_or_else(|| anyhow::anyhow!("Unknown role."))
.map_err(AuthError::UnexpectedError)?;
handle handle
.await .await
.context("Failed to spawn blocking task.") .context("Failed to spawn blocking task.")
.map_err(AuthError::UnexpectedError)? .map_err(AuthError::UnexpectedError)?
.map_err(AuthError::InvalidCredentials) .map_err(AuthError::InvalidCredentials)
.map(|_| (uuid, role)) .map(|_| uuid)
} }
#[tracing::instrument(name = "Verify password", skip_all)] #[tracing::instrument(
name = "Verify password",
skip(expected_password_hash, password_candidate)
)]
fn verify_password_hash( fn verify_password_hash(
expected_password_hash: SecretString, expected_password_hash: SecretString,
password_candidate: SecretString, password_candidate: SecretString,
@@ -113,14 +115,14 @@ fn verify_password_hash(
.context("Password verification failed.") .context("Password verification failed.")
} }
#[tracing::instrument(name = "Get stored credentials", skip(connection_pool))] #[tracing::instrument(name = "Get stored credentials", skip(username, connection_pool))]
async fn get_stored_credentials( async fn get_stored_credentials(
username: &str, username: &str,
connection_pool: &PgPool, connection_pool: &PgPool,
) -> Result<Option<(Uuid, SecretString, Role)>, sqlx::Error> { ) -> Result<Option<(Uuid, SecretString)>, sqlx::Error> {
let row = sqlx::query!( let row = sqlx::query!(
r#" r#"
SELECT user_id, password_hash, role as "role: Role" SELECT user_id, password_hash
FROM users FROM users
WHERE username = $1 WHERE username = $1
"#, "#,
@@ -128,35 +130,37 @@ async fn get_stored_credentials(
) )
.fetch_optional(connection_pool) .fetch_optional(connection_pool)
.await? .await?
.map(|row| (row.user_id, SecretString::from(row.password_hash), row.role)); .map(|row| (row.user_id, SecretString::from(row.password_hash)));
Ok(row) Ok(row)
} }
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone, Copy, PartialEq, Eq, sqlx::Type)] pub async fn require_auth(
#[sqlx(type_name = "user_role", rename_all = "lowercase")] session: TypedSession,
pub enum Role { mut request: Request,
Admin, next: Next,
Writer, ) -> Result<Response, AdminError> {
} let user_id = session
.get_user_id()
.await
.map_err(|e| AdminError::UnexpectedError(e.into()))?
.ok_or(AdminError::NotAuthenticated)?;
let username = session
.get_username()
.await
.map_err(|e| AdminError::UnexpectedError(e.into()))?
.ok_or(AdminError::UnexpectedError(anyhow::anyhow!(
"Could not find username in session."
)))?;
impl Display for Role { request
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { .extensions_mut()
match self { .insert(AuthenticatedUser { user_id, username });
Role::Admin => write!(f, "admin"),
Role::Writer => write!(f, "writer"), Ok(next.run(request).await)
}
}
} }
#[derive(Clone)] #[derive(Clone)]
pub struct AuthenticatedUser { pub struct AuthenticatedUser {
pub user_id: Uuid, pub user_id: Uuid,
pub username: String, pub username: String,
pub role: Role,
}
impl AuthenticatedUser {
pub fn is_admin(&self) -> bool {
matches!(self.role, Role::Admin)
}
} }


@@ -1,13 +1,8 @@
use crate::domain::SubscriberEmail; use crate::domain::SubscriberEmail;
use anyhow::Context;
use secrecy::{ExposeSecret, SecretString}; use secrecy::{ExposeSecret, SecretString};
use serde::Deserialize; use serde::Deserialize;
use serde_aux::field_attributes::deserialize_number_from_string; use serde_aux::field_attributes::deserialize_number_from_string;
use sqlx::postgres::{PgConnectOptions, PgSslMode}; use sqlx::postgres::{PgConnectOptions, PgSslMode};
use tower_sessions_redis_store::{
RedisStore,
fred::prelude::{ClientLike, Pool},
};
pub fn get_configuration() -> Result<Settings, config::ConfigError> { pub fn get_configuration() -> Result<Settings, config::ConfigError> {
let base_path = std::env::current_dir().expect("Failed to determine the current directory"); let base_path = std::env::current_dir().expect("Failed to determine the current directory");
@@ -65,7 +60,8 @@ pub struct Settings {
pub application: ApplicationSettings, pub application: ApplicationSettings,
pub database: DatabaseSettings, pub database: DatabaseSettings,
pub email_client: EmailClientSettings, pub email_client: EmailClientSettings,
pub kv_store: RedisSettings, pub redis_uri: SecretString,
pub require_tls: bool,
} }
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
@@ -105,35 +101,6 @@ impl EmailClientSettings {
} }
} }
#[derive(Clone, Deserialize)]
pub struct RedisSettings {
pub host: String,
pub port: u16,
}
impl RedisSettings {
pub fn connection_string(&self) -> String {
format!("redis://{}:{}", self.host, self.port)
}
pub async fn session_store(&self) -> Result<RedisStore<Pool>, anyhow::Error> {
let pool = Pool::new(
tower_sessions_redis_store::fred::prelude::Config::from_url(&self.connection_string())
.context("Failed to parse Redis URL string.")?,
None,
None,
None,
6,
)
.unwrap();
pool.connect();
pool.wait_for_connect()
.await
.context("Failed to connect to the Redis server.")?;
Ok(RedisStore::new(pool))
}
}
#[derive(Clone, Deserialize)] #[derive(Clone, Deserialize)]
pub struct DatabaseSettings { pub struct DatabaseSettings {
pub username: String, pub username: String,
@@ -143,7 +110,7 @@ pub struct DatabaseSettings {
pub host: String, pub host: String,
pub database_name: String, pub database_name: String,
pub require_ssl: bool, pub require_ssl: bool,
pub timeout_milliseconds: u64, pub timeout_millis: u64,
} }
impl DatabaseSettings { impl DatabaseSettings {


@@ -1,58 +0,0 @@
use anyhow::Context;
use sqlx::{
PgPool,
postgres::{PgConnectOptions, PgPoolOptions},
};
use std::time::Duration;
pub async fn run_until_stopped(configuration: PgConnectOptions) -> Result<(), anyhow::Error> {
let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration);
worker_loop(connection_pool).await
}
async fn worker_loop(connection_pool: PgPool) -> Result<(), anyhow::Error> {
loop {
if let Err(e) = clean_pending_subscriptions(&connection_pool).await {
tracing::error!("{:?}", e);
}
if let Err(e) = clean_idempotency_keys(&connection_pool).await {
tracing::error!("{:?}", e);
}
tokio::time::sleep(Duration::from_secs(60)).await;
}
}
async fn clean_pending_subscriptions(connection_pool: &PgPool) -> Result<(), anyhow::Error> {
let result = sqlx::query!(
"
DELETE FROM subscriptions
WHERE status = 'pending_confirmation'
AND subscribed_at < NOW() - INTERVAL '24 hours'
"
)
.execute(connection_pool)
.await
.context("Failed to clean up subscriptions table.")?;
match result.rows_affected() {
n if n > 0 => tracing::info!("Cleaned up {} expired subscriptions.", n),
_ => (),
}
Ok(())
}
async fn clean_idempotency_keys(connection_pool: &PgPool) -> Result<(), anyhow::Error> {
let result = sqlx::query!(
"
DELETE FROM idempotency
WHERE created_at < NOW() - INTERVAL '1 hour'
"
)
.execute(connection_pool)
.await
.context("Failed to clean up idempontency table.")?;
match result.rows_affected() {
n if n > 0 => tracing::info!("Cleaned up {} old idempotency records.", n),
_ => (),
}
Ok(())
}


@@ -1,13 +1,7 @@
mod comment;
mod new_subscriber; mod new_subscriber;
mod post; mod post;
mod subscriber_email; mod subscriber_email;
mod subscribers;
mod user;
pub use comment::CommentEntry;
pub use new_subscriber::NewSubscriber; pub use new_subscriber::NewSubscriber;
pub use post::PostEntry; pub use post::PostEntry;
pub use subscriber_email::SubscriberEmail; pub use subscriber_email::SubscriberEmail;
pub use subscribers::SubscriberEntry;
pub use user::UserEntry;


@@ -1,18 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct CommentEntry {
pub user_id: Option<Uuid>,
pub username: Option<String>,
pub comment_id: Uuid,
pub post_id: Uuid,
pub author: Option<String>,
pub content: String,
pub published_at: DateTime<Utc>,
}
impl CommentEntry {
pub fn formatted_date(&self) -> String {
self.published_at.format("%B %d, %Y %H:%M").to_string()
}
}


@@ -3,23 +3,25 @@ use uuid::Uuid;
pub struct PostEntry { pub struct PostEntry {
pub post_id: Uuid, pub post_id: Uuid,
pub author_id: Uuid, pub author: Option<String>,
pub author: String,
pub full_name: Option<String>,
pub title: String, pub title: String,
pub content: String, pub content: String,
pub published_at: DateTime<Utc>, pub published_at: DateTime<Utc>,
pub last_modified: Option<DateTime<Utc>>,
} }
impl PostEntry { impl PostEntry {
#[allow(dead_code)]
pub fn formatted_date(&self) -> String { pub fn formatted_date(&self) -> String {
self.published_at.format("%B %d, %Y %H:%M").to_string() self.published_at.format("%B %d, %Y").to_string()
} }
pub fn to_html(&self) -> anyhow::Result<String> { pub fn to_html(self) -> Result<Self, anyhow::Error> {
match markdown::to_html_with_options(&self.content, &markdown::Options::gfm()) { match markdown::to_html_with_options(&self.content, &markdown::Options::gfm()) {
Ok(content) => Ok(content), Ok(mut content) => {
content = content.replace("<table>", r#"<div class="table-wrapper"><table>"#);
content = content.replace("</table>", r#"</table></div>"#);
Ok(Self { content, ..self })
}
Err(e) => anyhow::bail!(e), Err(e) => anyhow::bail!(e),
} }
} }


@@ -1,5 +1,3 @@
use std::fmt::Display;
use validator::Validate; use validator::Validate;
#[derive(Debug, Validate)] #[derive(Debug, Validate)]
@@ -24,12 +22,6 @@ impl AsRef<str> for SubscriberEmail {
} }
} }
impl Display for SubscriberEmail {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", self.email)
}
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::SubscriberEmail; use super::SubscriberEmail;


@@ -1,21 +0,0 @@
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct SubscriberEntry {
pub id: Uuid,
pub email: String,
pub subscribed_at: DateTime<Utc>,
pub status: String,
pub unsubscribe_token: Option<String>,
}
impl SubscriberEntry {
pub fn confirmed(&self) -> bool {
self.status == "confirmed"
}
#[allow(dead_code)]
pub fn formatted_date(&self) -> String {
self.subscribed_at.format("%B %d, %Y").to_string()
}
}


@@ -1,22 +0,0 @@
use crate::authentication::Role;
use chrono::{DateTime, Utc};
use uuid::Uuid;
pub struct UserEntry {
pub user_id: Uuid,
pub username: String,
pub role: Role,
pub full_name: Option<String>,
pub bio: Option<String>,
pub member_since: DateTime<Utc>,
}
impl UserEntry {
pub fn formatted_date(&self) -> String {
self.member_since.format("%B %d, %Y").to_string()
}
pub fn is_admin(&self) -> bool {
matches!(self.role, Role::Admin)
}
}


@@ -119,7 +119,7 @@ mod tests {
EmailClient::build(settings).unwrap() EmailClient::build(settings).unwrap()
} }
#[sqlx::test] #[tokio::test]
async fn send_email_sends_the_expected_request() { async fn send_email_sends_the_expected_request() {
let mock_server = MockServer::start().await; let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri()); let email_client = email_client(mock_server.uri());
@@ -141,7 +141,7 @@ mod tests {
.unwrap(); .unwrap();
} }
#[sqlx::test] #[tokio::test]
async fn send_email_succeeds_if_the_server_returns_200() { async fn send_email_succeeds_if_the_server_returns_200() {
let mock_server = MockServer::start().await; let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri()); let email_client = email_client(mock_server.uri());
@@ -159,7 +159,7 @@ mod tests {
assert_ok!(response); assert_ok!(response);
} }
#[sqlx::test] #[tokio::test]
async fn send_email_fails_if_the_server_retuns_500() { async fn send_email_fails_if_the_server_retuns_500() {
let mock_server = MockServer::start().await; let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri()); let email_client = email_client(mock_server.uri());
@@ -177,7 +177,7 @@ mod tests {
assert_err!(response); assert_err!(response);
} }
#[sqlx::test] #[tokio::test]
async fn send_email_times_out_if_the_server_takes_too_long() { async fn send_email_times_out_if_the_server_takes_too_long() {
let mock_server = MockServer::start().await; let mock_server = MockServer::start().await;
let email_client = email_client(mock_server.uri()); let email_client = email_client(mock_server.uri());


@@ -1,4 +1,3 @@
#[derive(Debug)]
pub struct IdempotencyKey(String); pub struct IdempotencyKey(String);
impl TryFrom<String> for IdempotencyKey { impl TryFrom<String> for IdempotencyKey {


@@ -7,6 +7,7 @@ use axum::{
use reqwest::StatusCode; use reqwest::StatusCode;
use sqlx::{Executor, PgPool, Postgres, Transaction}; use sqlx::{Executor, PgPool, Postgres, Transaction};
use std::str::FromStr; use std::str::FromStr;
use uuid::Uuid;
#[derive(Debug, sqlx::Type)] #[derive(Debug, sqlx::Type)]
#[sqlx(type_name = "header_pair")] #[sqlx(type_name = "header_pair")]
@@ -15,13 +16,10 @@ struct HeaderPairRecord {
value: Vec<u8>, value: Vec<u8>,
} }
#[tracing::instrument(
name = "Fetching saved response in database if it exists",
skip(connection_pool)
)]
pub async fn get_saved_response( pub async fn get_saved_response(
connection_pool: &PgPool, connection_pool: &PgPool,
idempotency_key: &IdempotencyKey, idempotency_key: &IdempotencyKey,
user_id: Uuid,
) -> Result<Option<Response>, anyhow::Error> { ) -> Result<Option<Response>, anyhow::Error> {
let saved_response = sqlx::query!( let saved_response = sqlx::query!(
r#" r#"
@@ -30,8 +28,11 @@ pub async fn get_saved_response(
response_headers as "response_headers!: Vec<HeaderPairRecord>", response_headers as "response_headers!: Vec<HeaderPairRecord>",
response_body as "response_body!" response_body as "response_body!"
FROM idempotency FROM idempotency
WHERE idempotency_key = $1 WHERE
user_id = $1
AND idempotency_key = $2
"#, "#,
user_id,
idempotency_key.as_ref() idempotency_key.as_ref()
) )
.fetch_optional(connection_pool) .fetch_optional(connection_pool)
@@ -52,10 +53,10 @@ pub async fn get_saved_response(
} }
} }
#[tracing::instrument(name = "Saving response in database", skip(transaction, response))]
pub async fn save_response( pub async fn save_response(
mut transaction: Transaction<'static, Postgres>, mut transaction: Transaction<'static, Postgres>,
idempotency_key: &IdempotencyKey, idempotency_key: &IdempotencyKey,
user_id: Uuid,
response: Response<Body>, response: Response<Body>,
) -> Result<Response<Body>, anyhow::Error> { ) -> Result<Response<Body>, anyhow::Error> {
let status_code = response.status().as_u16() as i16; let status_code = response.status().as_u16() as i16;
@@ -74,11 +75,14 @@ pub async fn save_response(
r#" r#"
UPDATE idempotency UPDATE idempotency
SET SET
response_status_code = $2, response_status_code = $3,
response_headers = $3, response_headers = $4,
response_body = $4 response_body = $5
WHERE idempotency_key = $1 WHERE
user_id = $1
AND idempotency_key = $2
"#, "#,
user_id,
idempotency_key.as_ref(), idempotency_key.as_ref(),
status_code, status_code,
headers, headers,
@@ -100,21 +104,23 @@ pub enum NextAction {
pub async fn try_processing( pub async fn try_processing(
connection_pool: &PgPool, connection_pool: &PgPool,
idempotency_key: &IdempotencyKey, idempotency_key: &IdempotencyKey,
user_id: Uuid,
) -> Result<NextAction, anyhow::Error> { ) -> Result<NextAction, anyhow::Error> {
let mut transaction = connection_pool.begin().await?; let mut transaction = connection_pool.begin().await?;
let query = sqlx::query!( let query = sqlx::query!(
r#" r#"
INSERT INTO idempotency (idempotency_key, created_at) INSERT INTO idempotency (user_id, idempotency_key, created_at)
VALUES ($1, now()) VALUES ($1, $2, now())
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
"#, "#,
user_id,
idempotency_key.as_ref() idempotency_key.as_ref()
); );
let n_inserted_rows = transaction.execute(query).await?.rows_affected(); let n_inserted_rows = transaction.execute(query).await?.rows_affected();
if n_inserted_rows > 0 { if n_inserted_rows > 0 {
Ok(NextAction::StartProcessing(transaction)) Ok(NextAction::StartProcessing(transaction))
} else { } else {
let saved_response = get_saved_response(connection_pool, idempotency_key) let saved_response = get_saved_response(connection_pool, idempotency_key, user_id)
.await? .await?
.ok_or_else(|| anyhow::anyhow!("Could not find saved response."))?; .ok_or_else(|| anyhow::anyhow!("Could not find saved response."))?;
Ok(NextAction::ReturnSavedResponse(saved_response)) Ok(NextAction::ReturnSavedResponse(saved_response))


@@ -1,13 +1,10 @@
use crate::{ use crate::{configuration::Settings, domain::SubscriberEmail, email_client::EmailClient};
configuration::Settings, domain::SubscriberEmail, email_client::EmailClient, routes::EmailType,
};
use anyhow::Context;
use sqlx::{Executor, PgPool, Postgres, Row, Transaction, postgres::PgPoolOptions}; use sqlx::{Executor, PgPool, Postgres, Row, Transaction, postgres::PgPoolOptions};
use std::time::Duration; use std::time::Duration;
use tracing::{Span, field::display}; use tracing::{Span, field::display};
use uuid::Uuid; use uuid::Uuid;
pub async fn run_until_stopped(configuration: Settings) -> Result<(), anyhow::Error> { pub async fn run_worker_until_stopped(configuration: Settings) -> Result<(), anyhow::Error> {
let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db()); let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db());
let email_client = EmailClient::build(configuration.email_client).unwrap(); let email_client = EmailClient::build(configuration.email_client).unwrap();
worker_loop(connection_pool, email_client).await worker_loop(connection_pool, email_client).await
@@ -31,32 +28,48 @@ pub enum ExecutionOutcome {
EmptyQueue, EmptyQueue,
} }
#[tracing::instrument(
skip_all,
fields(
newsletter_issue_id=tracing::field::Empty,
subscriber_email=tracing::field::Empty
),
err
)]
pub async fn try_execute_task( pub async fn try_execute_task(
connection_pool: &PgPool, connection_pool: &PgPool,
email_client: &EmailClient, email_client: &EmailClient,
) -> Result<ExecutionOutcome, anyhow::Error> { ) -> Result<ExecutionOutcome, anyhow::Error> {
let task = dequeue_task(connection_pool).await?; let task = dequeue_task(connection_pool).await?;
let (mut transaction, task) = match task { if task.is_none() {
Some((transaction, task)) => (transaction, task), return Ok(ExecutionOutcome::EmptyQueue);
None => return Ok(ExecutionOutcome::EmptyQueue), }
}; let (transaction, task) = task.unwrap();
Span::current() Span::current()
.record("newsletter_issue_id", display(task.newsletter_issue_id)) .record("newsletter_issue_id", display(task.newsletter_issue_id))
.record("subscriber_email", display(&task.subscriber_email)); .record("subscriber_email", display(&task.subscriber_email));
match SubscriberEmail::parse(task.subscriber_email.clone()) { match SubscriberEmail::parse(task.subscriber_email.clone()) {
Ok(email) => { Ok(email) => {
execute_task( let mut issue = get_issue(connection_pool, task.newsletter_issue_id).await?;
connection_pool, issue.inject_unsubscribe_token(&task.unsubscribe_token);
&mut transaction, if let Err(e) = email_client
&task, .send_email(
email, &email,
email_client, &issue.title,
) &issue.html_content,
.await?; &issue.text_content,
)
.await
{
tracing::error!(
error.message = %e,
"Failed to deliver issue to confirmed subscriber. Skipping."
);
}
} }
Err(e) => { Err(e) => {
tracing::error!( tracing::error!(
error = %e, error.message = %e,
"Skipping a subscriber. Their stored contact details are invalid." "Skipping a subscriber. Their stored contact details are invalid."
); );
} }
@@ -72,7 +85,6 @@ pub async fn try_execute_task(
} }
struct NewsletterIssue { struct NewsletterIssue {
newsletter_issue_id: Uuid,
title: String, title: String,
text_content: String, text_content: String,
html_content: String, html_content: String,
@@ -83,30 +95,9 @@ impl NewsletterIssue {
self.text_content = self.text_content.replace("UNSUBSCRIBE_TOKEN", token); self.text_content = self.text_content.replace("UNSUBSCRIBE_TOKEN", token);
self.html_content = self.html_content.replace("UNSUBSCRIBE_TOKEN", token); self.html_content = self.html_content.replace("UNSUBSCRIBE_TOKEN", token);
} }
async fn inject_tracking_info(
&mut self,
transaction: &mut Transaction<'static, Postgres>,
) -> Result<(), anyhow::Error> {
let email_id = Uuid::new_v4();
let query = sqlx::query!(
r#"
INSERT INTO notifications_delivered (email_id, newsletter_issue_id)
VALUES ($1, $2)
"#,
email_id,
self.newsletter_issue_id
);
transaction
.execute(query)
.await
.context("Failed to store email tracking info.")?;
self.text_content = self.text_content.replace("EMAIL_ID", &email_id.to_string());
self.html_content = self.html_content.replace("EMAIL_ID", &email_id.to_string());
Ok(())
}
} }
#[tracing::instrument(skip_all)]
async fn get_issue( async fn get_issue(
connection_pool: &PgPool, connection_pool: &PgPool,
issue_id: Uuid, issue_id: Uuid,
@@ -114,7 +105,7 @@ async fn get_issue(
let issue = sqlx::query_as!( let issue = sqlx::query_as!(
NewsletterIssue, NewsletterIssue,
r#" r#"
SELECT newsletter_issue_id, title, text_content, html_content SELECT title, text_content, html_content
FROM newsletter_issues FROM newsletter_issues
WHERE newsletter_issue_id = $1 WHERE newsletter_issue_id = $1
"#, "#,
@@ -129,16 +120,16 @@ pub struct Task {
pub newsletter_issue_id: Uuid, pub newsletter_issue_id: Uuid,
pub subscriber_email: String, pub subscriber_email: String,
pub unsubscribe_token: String, pub unsubscribe_token: String,
pub kind: String,
} }
#[tracing::instrument(skip_all)]
async fn dequeue_task( async fn dequeue_task(
connection_pool: &PgPool, connection_pool: &PgPool,
) -> Result<Option<(Transaction<'static, Postgres>, Task)>, anyhow::Error> { ) -> Result<Option<(Transaction<'static, Postgres>, Task)>, anyhow::Error> {
let mut transaction = connection_pool.begin().await?; let mut transaction = connection_pool.begin().await?;
let query = sqlx::query!( let query = sqlx::query!(
r#" r#"
SELECT newsletter_issue_id, subscriber_email, unsubscribe_token, kind SELECT newsletter_issue_id, subscriber_email, unsubscribe_token
FROM issue_delivery_queue FROM issue_delivery_queue
FOR UPDATE FOR UPDATE
SKIP LOCKED SKIP LOCKED
@@ -151,7 +142,6 @@ async fn dequeue_task(
newsletter_issue_id: row.get("newsletter_issue_id"), newsletter_issue_id: row.get("newsletter_issue_id"),
subscriber_email: row.get("subscriber_email"), subscriber_email: row.get("subscriber_email"),
unsubscribe_token: row.get("unsubscribe_token"), unsubscribe_token: row.get("unsubscribe_token"),
kind: row.get("kind"),
}; };
Ok(Some((transaction, task))) Ok(Some((transaction, task)))
} else { } else {
@@ -159,35 +149,7 @@ async fn dequeue_task(
} }
} }
#[tracing::instrument( #[tracing::instrument(skip_all)]
name = "Executing task",
skip_all,
fields(email = %email),
)]
async fn execute_task(
connection_pool: &PgPool,
transaction: &mut Transaction<'static, Postgres>,
task: &Task,
email: SubscriberEmail,
email_client: &EmailClient,
) -> Result<(), anyhow::Error> {
let mut issue = get_issue(connection_pool, task.newsletter_issue_id).await?;
issue.inject_unsubscribe_token(&task.unsubscribe_token);
if task.kind == EmailType::NewPost.to_string() {
issue.inject_tracking_info(transaction).await?;
}
email_client
.send_email(
&email,
&issue.title,
&issue.html_content,
&issue.text_content,
)
.await
.context("Failed to deliver newsletter issue to subscriber..")?;
Ok(())
}
async fn delete_task( async fn delete_task(
mut transaction: Transaction<'static, Postgres>, mut transaction: Transaction<'static, Postgres>,
issue_id: Uuid, issue_id: Uuid,


@@ -1,6 +1,5 @@
pub mod authentication; pub mod authentication;
pub mod configuration; pub mod configuration;
pub mod database_worker;
pub mod domain; pub mod domain;
pub mod email_client; pub mod email_client;
pub mod idempotency; pub mod idempotency;


@@ -1,6 +1,6 @@
use zero2prod::{ use zero2prod::{
configuration::get_configuration, database_worker, issue_delivery_worker, startup::Application, configuration::get_configuration, issue_delivery_worker::run_worker_until_stopped,
telemetry::init_subscriber, startup::Application, telemetry::init_subscriber,
}; };
#[tokio::main] #[tokio::main]
@@ -11,16 +11,11 @@ async fn main() -> Result<(), anyhow::Error> {
let application = Application::build(configuration.clone()).await?; let application = Application::build(configuration.clone()).await?;
let application_task = tokio::spawn(application.run_until_stopped()); let application_task = tokio::spawn(application.run_until_stopped());
let database_worker_task = tokio::spawn(database_worker::run_until_stopped( let worker_task = tokio::spawn(run_worker_until_stopped(configuration));
configuration.database.with_db(),
));
let delivery_worker_task =
tokio::spawn(issue_delivery_worker::run_until_stopped(configuration));
tokio::select! { tokio::select! {
_ = application_task => {}, _ = application_task => {},
_ = database_worker_task => {}, _ = worker_task => {},
_ = delivery_worker_task => {},
}; };
Ok(()) Ok(())


@@ -1,5 +1,4 @@
mod admin; mod admin;
mod comments;
mod health_check; mod health_check;
mod home; mod home;
mod login; mod login;
@@ -7,32 +6,26 @@ mod posts;
mod subscriptions; mod subscriptions;
mod subscriptions_confirm; mod subscriptions_confirm;
mod unsubscribe; mod unsubscribe;
mod users;
pub use admin::*; pub use admin::*;
use askama::Template; use askama::Template;
use axum::{ use axum::{
extract::FromRequestParts, http::HeaderMap,
http::{HeaderMap, request::Parts},
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
}; };
pub use comments::*;
pub use health_check::*; pub use health_check::*;
pub use home::*; pub use home::*;
pub use login::*; pub use login::*;
pub use posts::*; pub use posts::*;
use rand::{Rng, distr::Alphanumeric}; use rand::{Rng, distr::Alphanumeric};
use reqwest::StatusCode; use reqwest::StatusCode;
use serde::de::DeserializeOwned;
pub use subscriptions::*; pub use subscriptions::*;
pub use subscriptions_confirm::*; pub use subscriptions_confirm::*;
pub use unsubscribe::*; pub use unsubscribe::*;
pub use users::*;
use validator::ValidationErrors;
use crate::{ use crate::{
authentication::AuthError, authentication::AuthError,
templates::{ErrorTemplate, HtmlTemplate, MessageTemplate}, templates::{InternalErrorTemplate, MessageTemplate, NotFoundTemplate},
}; };
pub fn generate_token() -> String { pub fn generate_token() -> String {
@@ -68,8 +61,6 @@ pub enum AppError {
FormError(#[source] anyhow::Error), FormError(#[source] anyhow::Error),
#[error("Authentication is required.")] #[error("Authentication is required.")]
NotAuthenticated, NotAuthenticated,
#[error("Handler extractor failed.")]
Extractor(#[source] anyhow::Error),
} }
impl From<anyhow::Error> for AppError { impl From<anyhow::Error> for AppError {
@@ -113,16 +104,19 @@ impl IntoResponse for AppError {
full_page, full_page,
} => { } => {
let html = if *full_page { let html = if *full_page {
Html(ErrorTemplate::InternalServer.render().unwrap()) Html(InternalErrorTemplate.render().unwrap())
} else { } else {
let template = let template = MessageTemplate::Error {
MessageTemplate::error("An internal server error occured.".into()); message: "An internal server error occured.".into(),
};
Html(template.render().unwrap()) Html(template.render().unwrap())
}; };
html.into_response() html.into_response()
} }
AppError::FormError(error) => { AppError::FormError(error) => {
let template = MessageTemplate::error(error.to_string()); let template = MessageTemplate::Error {
message: error.to_string(),
};
Html(template.render().unwrap()).into_response() Html(template.render().unwrap()).into_response()
} }
AppError::NotAuthenticated => { AppError::NotAuthenticated => {
@@ -130,7 +124,6 @@ impl IntoResponse for AppError {
headers.insert("HX-Redirect", "/login".parse().unwrap()); headers.insert("HX-Redirect", "/login".parse().unwrap());
(StatusCode::OK, headers).into_response() (StatusCode::OK, headers).into_response()
} }
AppError::Extractor(_) => not_found_html(),
} }
} }
} }
@@ -162,68 +155,9 @@ impl From<AuthError> for AppError {
} }
pub async fn not_found() -> Response { pub async fn not_found() -> Response {
tracing::error!("Not found."); (
not_found_html() StatusCode::NOT_FOUND,
} Html(NotFoundTemplate.render().unwrap()),
)
pub fn not_found_html() -> Response { .into_response()
let template = HtmlTemplate(ErrorTemplate::NotFound);
(StatusCode::NOT_FOUND, template).into_response()
}
pub struct Path<T>(T);
impl<T, S> FromRequestParts<S> for Path<T>
where
T: DeserializeOwned + Send,
S: Send + Sync,
{
type Rejection = AppError;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
match axum::extract::Path::<T>::from_request_parts(parts, state).await {
Ok(value) => Ok(Self(value.0)),
Err(rejection) => Err(AppError::Extractor(anyhow::anyhow!(
"Path rejection: {:?}",
rejection
))),
}
}
}
pub struct Query<T>(pub T);
impl<T, S> FromRequestParts<S> for Query<T>
where
T: DeserializeOwned,
S: Send + Sync,
{
type Rejection = AppError;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
match axum::extract::Query::<T>::from_request_parts(parts, state).await {
Ok(value) => Ok(Self(value.0)),
Err(rejection) => Err(AppError::Extractor(anyhow::anyhow!(
"Query rejection: {:?}",
rejection
))),
}
}
}
pub fn join_error_messages(e: ValidationErrors) -> String {
let error_messages: Vec<_> = e
.field_errors()
.iter()
.flat_map(|(field, errors)| {
errors.iter().map(move |error| {
error
.message
.as_ref()
.map(|msg| msg.to_string())
.unwrap_or(format!("Invalid field: {}", field))
})
})
.collect();
error_messages.join("\n")
} }


@@ -3,27 +3,18 @@ mod dashboard;
mod logout; mod logout;
mod newsletters; mod newsletters;
mod posts; mod posts;
mod subscribers;
use crate::{ use crate::{
authentication::AuthenticatedUser, authentication::AuthenticatedUser,
routes::{AppError, error_chain_fmt}, routes::{AppError, error_chain_fmt},
session_state::TypedSession, session_state::TypedSession,
templates::{HtmlTemplate, MessageTemplate},
};
use anyhow::Context;
use axum::response::Redirect;
use axum::{
extract::Request,
middleware::Next,
response::{IntoResponse, Response},
}; };
use axum::{extract::Request, middleware::Next, response::Response};
pub use change_password::*; pub use change_password::*;
pub use dashboard::*; pub use dashboard::*;
pub use logout::*; pub use logout::*;
pub use newsletters::*; pub use newsletters::*;
pub use posts::*; pub use posts::*;
pub use subscribers::*;
#[derive(thiserror::Error)] #[derive(thiserror::Error)]
pub enum AdminError { pub enum AdminError {
@@ -32,7 +23,7 @@ pub enum AdminError {
#[error("Trying to access admin dashboard without authentication.")] #[error("Trying to access admin dashboard without authentication.")]
NotAuthenticated, NotAuthenticated,
#[error("Updating password failed.")] #[error("Updating password failed.")]
ChangePassword(anyhow::Error), ChangePassword(String),
#[error("Could not publish newsletter.")] #[error("Could not publish newsletter.")]
Publish(#[source] anyhow::Error), Publish(#[source] anyhow::Error),
#[error("The idempotency key was invalid.")] #[error("The idempotency key was invalid.")]
@@ -50,17 +41,11 @@ pub async fn require_auth(
mut request: Request, mut request: Request,
next: Next, next: Next,
) -> Result<Response, AppError> { ) -> Result<Response, AppError> {
let user_id = match session let user_id = session
.get_user_id() .get_user_id()
.await .await
.map_err(|e| AdminError::UnexpectedError(e.into()))? .map_err(|e| AdminError::UnexpectedError(e.into()))?
{ .ok_or(AdminError::NotAuthenticated)?;
None => {
tracing::error!("Not authenticated. Redirecting to /login.");
return Ok(Redirect::to("/login").into_response());
}
Some(user_id) => user_id,
};
let username = session let username = session
.get_username() .get_username()
.await .await
@@ -68,36 +53,10 @@ pub async fn require_auth(
.ok_or(AdminError::UnexpectedError(anyhow::anyhow!( .ok_or(AdminError::UnexpectedError(anyhow::anyhow!(
"Could not find username in session." "Could not find username in session."
)))?; )))?;
let role = session
.get_role()
.await
.context("Error retrieving user role in session.")?
.ok_or(anyhow::anyhow!("Could not find user role in session."))?;
request.extensions_mut().insert(AuthenticatedUser { request
user_id, .extensions_mut()
username, .insert(AuthenticatedUser { user_id, username });
role,
});
Ok(next.run(request).await) Ok(next.run(request).await)
} }
pub async fn require_admin(
session: TypedSession,
request: Request,
next: Next,
) -> Result<Response, AppError> {
if session
.has_admin_permissions()
.await
.context("Error retrieving user role in session.")?
{
Ok(next.run(request).await)
} else {
Ok(HtmlTemplate(MessageTemplate::error(
"This action requires administrator privileges.".into(),
))
.into_response())
}
}


@@ -20,9 +20,7 @@ pub struct PasswordFormData {
} }
pub async fn change_password( pub async fn change_password(
Extension(AuthenticatedUser { Extension(AuthenticatedUser { user_id, username }): Extension<AuthenticatedUser>,
user_id, username, ..
}): Extension<AuthenticatedUser>,
State(AppState { State(AppState {
connection_pool, .. connection_pool, ..
}): State<AppState>, }): State<AppState>,
@@ -33,16 +31,16 @@ pub async fn change_password(
password: form.current_password, password: form.current_password,
}; };
if form.new_password.expose_secret() != form.new_password_check.expose_secret() { if form.new_password.expose_secret() != form.new_password_check.expose_secret() {
Err(AdminError::ChangePassword(anyhow::anyhow!( Err(AdminError::ChangePassword(
"You entered two different passwords - the field values must match." "You entered two different passwords - the field values must match.".to_string(),
)) )
.into()) .into())
} else if let Err(e) = validate_credentials(credentials, &connection_pool).await { } else if let Err(e) = validate_credentials(credentials, &connection_pool).await {
match e { match e {
AuthError::UnexpectedError(error) => Err(AdminError::UnexpectedError(error).into()), AuthError::UnexpectedError(error) => Err(AdminError::UnexpectedError(error).into()),
AuthError::InvalidCredentials(_) => Err(AdminError::ChangePassword(anyhow::anyhow!( AuthError::InvalidCredentials(_) => Err(AdminError::ChangePassword(
"The current password is incorrect." "The current password is incorrect.".to_string(),
)) )
.into()), .into()),
} }
} else if let Err(e) = verify_password(form.new_password.expose_secret()) { } else if let Err(e) = verify_password(form.new_password.expose_secret()) {
@@ -50,15 +48,17 @@ pub async fn change_password(
} else { } else {
authentication::change_password(user_id, form.new_password, &connection_pool) authentication::change_password(user_id, form.new_password, &connection_pool)
.await .await
.map_err(AdminError::ChangePassword)?; .map_err(|e| AdminError::ChangePassword(e.to_string()))?;
let template = MessageTemplate::success("Your password has been changed.".to_string()); let template = MessageTemplate::Success {
message: "Your password has been changed.".to_string(),
};
Ok(Html(template.render().unwrap()).into_response()) Ok(Html(template.render().unwrap()).into_response())
} }
} }
pub fn verify_password(password: &str) -> Result<(), anyhow::Error> { fn verify_password(password: &str) -> Result<(), String> {
if password.len() < 12 || password.len() > 128 { if password.len() < 12 || password.len() > 128 {
anyhow::bail!("The password must contain between 12 and 128 characters."); return Err("The password must contain between 12 and 128 characters.".into());
} }
Ok(()) Ok(())
} }


@@ -1,133 +1,20 @@
use crate::routes::{ use crate::{authentication::AuthenticatedUser, templates::DashboardTemplate};
COMMENTS_PER_PAGE, POSTS_PER_PAGE, SUBS_PER_PAGE, get_comments_count, get_comments_page,
get_posts_count, get_posts_page, get_users,
};
use crate::{
authentication::AuthenticatedUser,
routes::{AppError, get_max_page, get_subs, get_total_subs},
startup::AppState,
templates::DashboardTemplate,
};
use anyhow::Context;
use askama::Template; use askama::Template;
use axum::{ use axum::{
Extension, Extension,
extract::State,
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
}; };
use sqlx::PgPool;
use uuid::Uuid; use uuid::Uuid;
pub struct DashboardStats {
pub subscribers: i64,
pub posts: i64,
pub notifications_sent: i64,
pub open_rate: f64,
}
impl DashboardStats {
pub fn formatted_rate(&self) -> String {
format!("{:.1}%", self.open_rate)
}
}
pub async fn admin_dashboard( pub async fn admin_dashboard(
State(AppState { Extension(AuthenticatedUser { username, .. }): Extension<AuthenticatedUser>,
connection_pool, .. ) -> Response {
}): State<AppState>,
Extension(user): Extension<AuthenticatedUser>,
) -> Result<Response, AppError> {
let stats = get_stats(&connection_pool).await?;
let idempotency_key_1 = Uuid::new_v4().to_string(); let idempotency_key_1 = Uuid::new_v4().to_string();
let idempotency_key_2 = Uuid::new_v4().to_string(); let idempotency_key_2 = Uuid::new_v4().to_string();
let current_page = 1;
let subscribers = get_subs(&connection_pool, current_page)
.await
.context("Could not fetch subscribers from database.")
.map_err(AppError::unexpected_message)?;
let subs_count = get_total_subs(&connection_pool)
.await
.context("Could not fetch total subscribers count from the database.")?;
let max_page = get_max_page(subs_count, SUBS_PER_PAGE);
let users = get_users(&connection_pool)
.await
.context("Could not fetch users")?;
let posts = get_posts_page(&connection_pool, 1)
.await
.context("Could not fetch posts.")?;
let posts_current_page = 1;
let posts_count = get_posts_count(&connection_pool)
.await
.context("Could not fetch posts count.")?;
let posts_max_page = get_max_page(posts_count, POSTS_PER_PAGE);
let comments_current_page = 1;
let comments = get_comments_page(&connection_pool, comments_current_page)
.await
.context("Could not fetch comments.")?;
let comments_count = get_comments_count(&connection_pool)
.await
.context("Could not fetch comments count.")?;
let comments_max_page = get_max_page(comments_count, COMMENTS_PER_PAGE);
let template = DashboardTemplate { let template = DashboardTemplate {
user, username,
idempotency_key_1, idempotency_key_1,
idempotency_key_2, idempotency_key_2,
stats,
subscribers,
current_page,
max_page,
count: subs_count,
users,
posts,
posts_current_page,
posts_max_page,
posts_count,
comments,
comments_current_page,
comments_max_page,
comments_count,
}; };
Ok(Html(template.render().unwrap()).into_response()) Html(template.render().unwrap()).into_response()
}
#[tracing::instrument("Computing dashboard stats", skip_all)]
async fn get_stats(connection_pool: &PgPool) -> Result<DashboardStats, anyhow::Error> {
let subscribers =
sqlx::query_scalar!("SELECT count(*) FROM subscriptions WHERE status = 'confirmed'")
.fetch_one(connection_pool)
.await
.context("Failed to fetch subscribers count.")?
.unwrap_or(0);
let posts = sqlx::query_scalar!("SELECT count(*) FROM posts")
.fetch_one(connection_pool)
.await
.context("Failed to fetch posts count.")?
.unwrap_or(0);
let notifications_sent = sqlx::query_scalar!("SELECT count(*) FROM notifications_delivered")
.fetch_one(connection_pool)
.await
.context("Failed to fetch notifications sent count.")?
.unwrap_or(0);
let opened =
sqlx::query_scalar!("SELECT count(*) FROM notifications_delivered WHERE opened = TRUE")
.fetch_one(connection_pool)
.await
.context("Failed to fetch notifications sent count.")?
.unwrap_or(0);
let open_rate = if notifications_sent == 0 {
0.0
} else {
(opened as f64) / (notifications_sent as f64) * 100.0
};
Ok(DashboardStats {
subscribers,
posts,
notifications_sent,
open_rate,
})
} }


@@ -1,4 +1,5 @@
use crate::{ use crate::{
authentication::AuthenticatedUser,
idempotency::{IdempotencyKey, save_response, try_processing}, idempotency::{IdempotencyKey, save_response, try_processing},
routes::{AdminError, AppError}, routes::{AdminError, AppError},
startup::AppState, startup::AppState,
@@ -7,12 +8,11 @@ use crate::{
use anyhow::Context; use anyhow::Context;
use askama::Template; use askama::Template;
use axum::{ use axum::{
Form, Extension, Form,
extract::State, extract::State,
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
}; };
use sqlx::{Executor, Postgres, Transaction}; use sqlx::{Executor, Postgres, Transaction};
use std::fmt::Display;
use uuid::Uuid; use uuid::Uuid;
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
@@ -23,14 +23,13 @@ pub struct BodyData {
idempotency_key: String, idempotency_key: String,
} }
#[tracing::instrument(name = "Creating newsletter isue", skip_all, fields(issue_id = tracing::field::Empty))] #[tracing::instrument(skip_all)]
pub async fn insert_newsletter_issue( pub async fn insert_newsletter_issue(
transaction: &mut Transaction<'static, Postgres>, transaction: &mut Transaction<'static, Postgres>,
title: &str, title: &str,
email_template: &dyn EmailTemplate, email_template: &dyn EmailTemplate,
) -> Result<Uuid, sqlx::Error> { ) -> Result<Uuid, sqlx::Error> {
let newsletter_issue_id = Uuid::new_v4(); let newsletter_issue_id = Uuid::new_v4();
tracing::Span::current().record("issue_id", newsletter_issue_id.to_string());
let query = sqlx::query!( let query = sqlx::query!(
r#" r#"
INSERT INTO newsletter_issues ( INSERT INTO newsletter_issues (
@@ -47,53 +46,36 @@ pub async fn insert_newsletter_issue(
Ok(newsletter_issue_id) Ok(newsletter_issue_id)
} }
#[derive(Debug)] #[tracing::instrument(skip_all)]
pub enum EmailType {
NewPost,
Newsletter,
}
impl Display for EmailType {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
EmailType::NewPost => write!(f, "new_post"),
EmailType::Newsletter => write!(f, "newsletter"),
}
}
}
#[tracing::instrument(name = "Adding new task to queue", skip(transaction))]
pub async fn enqueue_delivery_tasks( pub async fn enqueue_delivery_tasks(
transaction: &mut Transaction<'static, Postgres>, transaction: &mut Transaction<'static, Postgres>,
newsletter_issue_id: Uuid, newsletter_issue_id: Uuid,
kind: EmailType,
) -> Result<(), sqlx::Error> { ) -> Result<(), sqlx::Error> {
let query = sqlx::query!( let query = sqlx::query!(
r#" r#"
INSERT INTO issue_delivery_queue ( INSERT INTO issue_delivery_queue (
newsletter_issue_id, newsletter_issue_id,
subscriber_email, subscriber_email,
unsubscribe_token, unsubscribe_token
kind
) )
SELECT $1, email, unsubscribe_token, $2 SELECT $1, email, unsubscribe_token
FROM subscriptions FROM subscriptions
WHERE status = 'confirmed' WHERE status = 'confirmed'
"#, "#,
newsletter_issue_id, newsletter_issue_id,
kind.to_string()
); );
transaction.execute(query).await?; transaction.execute(query).await?;
Ok(()) Ok(())
} }
#[tracing::instrument(name = "Publishing a newsletter", skip_all, fields(title = %form.title))] #[tracing::instrument(name = "Publishing a newsletter", skip(connection_pool, form))]
pub async fn publish_newsletter( pub async fn publish_newsletter(
State(AppState { State(AppState {
connection_pool, connection_pool,
base_url, base_url,
.. ..
}): State<AppState>, }): State<AppState>,
Extension(AuthenticatedUser { user_id, .. }): Extension<AuthenticatedUser>,
Form(form): Form<BodyData>, Form(form): Form<BodyData>,
) -> Result<Response, AppError> { ) -> Result<Response, AppError> {
validate_form(&form).map_err(|e| AdminError::Publish(anyhow::anyhow!(e)))?; validate_form(&form).map_err(|e| AdminError::Publish(anyhow::anyhow!(e)))?;
@@ -103,7 +85,7 @@ pub async fn publish_newsletter(
.try_into() .try_into()
.map_err(AdminError::Idempotency)?; .map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? { let mut transaction = match try_processing(&connection_pool, &idempotency_key, user_id).await? {
crate::idempotency::NextAction::StartProcessing(t) => t, crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => { crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response); return Ok(response);
@@ -120,25 +102,28 @@ pub async fn publish_newsletter(
.await .await
.context("Failed to store newsletter issue details.")?; .context("Failed to store newsletter issue details.")?;
enqueue_delivery_tasks(&mut transaction, issue_id, EmailType::Newsletter) enqueue_delivery_tasks(&mut transaction, issue_id)
.await .await
.context("Failed to enqueue delivery tasks.")?; .context("Failed to enqueue delivery tasks.")?;
let message = String::from("Your email has been queued for delivery."); let message = format!(
let template = MessageTemplate::success(message); r#"The newsletter issue "{}" has been published!"#,
form.title
);
let template = MessageTemplate::Success { message };
let response = Html(template.render().unwrap()).into_response(); let response = Html(template.render().unwrap()).into_response();
let response = save_response(transaction, &idempotency_key, response) let response = save_response(transaction, &idempotency_key, user_id, response)
.await .await
.map_err(AdminError::UnexpectedError)?; .map_err(AdminError::UnexpectedError)?;
Ok(response) Ok(response)
} }
fn validate_form(form: &BodyData) -> Result<(), anyhow::Error> { fn validate_form(form: &BodyData) -> Result<(), &'static str> {
if form.title.is_empty() { if form.title.is_empty() {
anyhow::bail!("The title was empty."); return Err("The title was empty.");
} }
if form.html.is_empty() || form.text.is_empty() { if form.html.is_empty() || form.text.is_empty() {
anyhow::bail!("The content was empty."); return Err("The content was empty.");
} }
Ok(()) Ok(())
} }

View File

@@ -1,9 +1,7 @@
use crate::{ use crate::{
authentication::AuthenticatedUser, authentication::AuthenticatedUser,
idempotency::{IdempotencyKey, save_response, try_processing}, idempotency::{IdempotencyKey, save_response, try_processing},
routes::{ routes::{AdminError, AppError, enqueue_delivery_tasks, insert_newsletter_issue},
AdminError, AppError, EmailType, Path, enqueue_delivery_tasks, insert_newsletter_issue,
},
startup::AppState, startup::AppState,
templates::{MessageTemplate, NewPostEmailTemplate}, templates::{MessageTemplate, NewPostEmailTemplate},
}; };
@@ -33,11 +31,7 @@ fn validate_form(form: &CreatePostForm) -> Result<(), anyhow::Error> {
} }
} }
#[tracing::instrument( #[tracing::instrument(name = "Creating a post", skip(connection_pool, form))]
name = "Publishing new blog post",
skip(connection_pool, base_url, form)
fields(title = %form.title)
)]
pub async fn create_post( pub async fn create_post(
State(AppState { State(AppState {
connection_pool, connection_pool,
@@ -54,7 +48,7 @@ pub async fn create_post(
.try_into() .try_into()
.map_err(AdminError::Idempotency)?; .map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? { let mut transaction = match try_processing(&connection_pool, &idempotency_key, user_id).await? {
crate::idempotency::NextAction::StartProcessing(t) => t, crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => { crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response); return Ok(response);
@@ -69,19 +63,24 @@ pub async fn create_post(
.await .await
.context("Failed to create newsletter.")?; .context("Failed to create newsletter.")?;
enqueue_delivery_tasks(&mut transaction, newsletter_uuid, EmailType::NewPost) enqueue_delivery_tasks(&mut transaction, newsletter_uuid)
.await .await
.context("Failed to enqueue delivery tasks.")?; .context("Failed to enqueue delivery tasks.")?;
let template = MessageTemplate::success("Your new post has been published!".into()); let template = MessageTemplate::Success {
message: "Your new post has been saved. Subscribers will be notified.".into(),
};
let response = Html(template.render().unwrap()).into_response(); let response = Html(template.render().unwrap()).into_response();
let response = save_response(transaction, &idempotency_key, response) let response = save_response(transaction, &idempotency_key, user_id, response)
.await .await
.map_err(AdminError::UnexpectedError)?; .map_err(AdminError::UnexpectedError)?;
Ok(response) Ok(response)
} }
#[tracing::instrument(name = "Saving new blog post in the database", skip_all)] #[tracing::instrument(
name = "Saving new post in the database",
skip(transaction, title, content, author)
)]
pub async fn insert_post( pub async fn insert_post(
transaction: &mut Transaction<'static, Postgres>, transaction: &mut Transaction<'static, Postgres>,
title: &str, title: &str,
@@ -104,7 +103,10 @@ pub async fn insert_post(
Ok(post_id) Ok(post_id)
} }
#[tracing::instrument(name = "Creating newsletter for new post", skip_all)] #[tracing::instrument(
name = "Creating newsletter for new post",
skip(transaction, post_title, post_id)
)]
pub async fn create_newsletter( pub async fn create_newsletter(
transaction: &mut Transaction<'static, Postgres>, transaction: &mut Transaction<'static, Postgres>,
base_url: &str, base_url: &str,
@@ -119,24 +121,3 @@ pub async fn create_newsletter(
}; };
insert_newsletter_issue(transaction, post_title, &template).await insert_newsletter_issue(transaction, post_title, &template).await
} }
pub async fn delete_post(
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(post_id): Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!("DELETE FROM posts WHERE post_id = $1", post_id)
.execute(&connection_pool)
.await
.context("Failed to delete post from database.")
.map_err(AppError::unexpected_message)?;
if res.rows_affected() == 0 {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the post in the database."
)))
} else {
let template = MessageTemplate::success("The post has been deleted.".into());
Ok(template.render().unwrap().into_response())
}
}

View File

@@ -1,116 +0,0 @@
use crate::{
domain::SubscriberEntry,
routes::{AppError, Path, Query},
startup::AppState,
templates::{MessageTemplate, SubListTemplate},
};
use anyhow::Context;
use askama::Template;
use axum::{
extract::State,
response::{Html, IntoResponse, Response},
};
use sqlx::PgPool;
use uuid::Uuid;
pub const SUBS_PER_PAGE: i64 = 5;
#[tracing::instrument(name = "Retrieving subscribers from database", skip(connection_pool))]
pub async fn get_subscribers_page(
State(AppState {
connection_pool, ..
}): State<AppState>,
Query(SubsQueryParams { page }): Query<SubsQueryParams>,
) -> Result<Response, AppError> {
let count = get_total_subs(&connection_pool)
.await
.context("Could not fetch total subscribers count from the database.")
.map_err(AppError::unexpected_message)?;
let max_page = get_max_page(count, SUBS_PER_PAGE);
let subscribers = get_subs(&connection_pool, page)
.await
.context("Could not fetch subscribers data.")
.map_err(AppError::unexpected_message)?;
let template = SubListTemplate {
subscribers,
current_page: page,
max_page,
};
Ok(Html(template.render().unwrap()).into_response())
}
#[tracing::instrument(
name = "Deleting subscriber from database",
skip(connection_pool),
fields(email=tracing::field::Empty)
)]
pub async fn delete_subscriber(
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(subscriber_id): Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!(
"DELETE FROM subscriptions WHERE id = $1 RETURNING email",
subscriber_id
)
.fetch_optional(&connection_pool)
.await
.context("Failed to delete subscriber from database.")
.map_err(AppError::unexpected_message)?;
if let Some(record) = res {
tracing::Span::current().record("email", tracing::field::display(&record.email));
let template = MessageTemplate::success(format!(
"The subscriber with email '{}' has been deleted.",
record.email
));
Ok(template.render().unwrap().into_response())
} else {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the subscriber in the database."
)))
}
}
#[tracing::instrument(
name = "Retrieving next subscribers in database",
skip(connection_pool),
fields(offset = tracing::field::Empty)
)]
pub async fn get_subs(
connection_pool: &PgPool,
page: i64,
) -> Result<Vec<SubscriberEntry>, sqlx::Error> {
let offset = (page - 1) * SUBS_PER_PAGE;
tracing::Span::current().record("offset", tracing::field::display(&offset));
let subscribers = sqlx::query_as!(
SubscriberEntry,
"SELECT * FROM subscriptions ORDER BY subscribed_at DESC LIMIT $1 OFFSET $2",
SUBS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
Ok(subscribers)
}
pub async fn get_total_subs(connection_pool: &PgPool) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM subscriptions")
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}
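/// Ceiling division: the number of pages needed to display `count` items
/// when each page holds `num_per_page` items.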
pub fn get_max_page(count: i64, num_per_page: i64) -> i64 {
let mut max_page = count.div_euclid(num_per_page);
if count % num_per_page > 0 {
max_page += 1;
}
max_page
}
#[derive(serde::Deserialize)]
pub struct SubsQueryParams {
page: i64,
}

View File

@@ -1,264 +0,0 @@
use crate::idempotency::{IdempotencyKey, save_response, try_processing};
use crate::routes::{AdminError, get_max_page};
use crate::templates::CommentsPageDashboardTemplate;
use crate::{
domain::CommentEntry,
routes::AppError,
startup::AppState,
templates::{CommentsList, HtmlTemplate, MessageTemplate},
};
use anyhow::Context;
use askama::Template;
use axum::{
Form,
extract::{Path, Query, State},
response::{IntoResponse, Response},
};
use sqlx::{Executor, PgPool, Postgres, Transaction};
use uuid::Uuid;
#[derive(serde::Deserialize)]
pub struct CommentPathParam {
post_id: Uuid,
}
#[derive(serde::Deserialize)]
pub struct CommentForm {
pub author: Option<String>,
pub content: String,
pub idempotency_key: String,
pub user_id: Option<Uuid>,
}
#[tracing::instrument(name = "Posting new comment", skip_all, fields(post_id = %post_id))]
pub async fn post_comment(
Path(CommentPathParam { post_id }): Path<CommentPathParam>,
State(AppState {
connection_pool, ..
}): State<AppState>,
Form(form): Form<CommentForm>,
) -> Result<Response, AppError> {
validate_form(&form)?;
let idempotency_key: IdempotencyKey = form
.idempotency_key
.try_into()
.map_err(AdminError::Idempotency)?;
let mut transaction = match try_processing(&connection_pool, &idempotency_key).await? {
crate::idempotency::NextAction::StartProcessing(t) => t,
crate::idempotency::NextAction::ReturnSavedResponse(response) => {
return Ok(response);
}
};
insert_comment(
&mut transaction,
post_id,
form.author,
form.user_id,
form.content,
)
.await
.context("Could not insert comment into database.")?;
let template = HtmlTemplate(MessageTemplate::success(
"Your comment has been posted.".into(),
));
let response = template.into_response();
let response = save_response(transaction, &idempotency_key, response).await?;
Ok(response)
}
fn validate_form(form: &CommentForm) -> Result<(), anyhow::Error> {
if form.content.is_empty() {
anyhow::bail!("Comment content cannot be empty.");
}
Ok(())
}
#[tracing::instrument(name = "Inserting new comment in database", skip_all, fields(comment_id = tracing::field::Empty))]
async fn insert_comment(
transaction: &mut Transaction<'static, Postgres>,
post_id: Uuid,
author: Option<String>,
user_id: Option<Uuid>,
content: String,
) -> Result<Uuid, sqlx::Error> {
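// Comments from logged-in users are attributed via user_id; a free-text author
// name is kept only for anonymous comments, trimmed, and dropped when empty.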
let author = if user_id.is_some() {
None
} else {
author
.filter(|s| !s.trim().is_empty())
.map(|s| s.trim().to_string())
};
let content = content.trim();
let comment_id = Uuid::new_v4();
tracing::Span::current().record("comment_id", comment_id.to_string());
let query = sqlx::query!(
"
INSERT INTO comments (user_id, comment_id, post_id, author, content)
VALUES ($1, $2, $3, $4, $5)
",
user_id,
comment_id,
post_id,
author,
content,
);
transaction.execute(query).await?;
Ok(comment_id)
}
pub const COMMENTS_PER_PAGE: i64 = 5;
#[derive(serde::Deserialize)]
pub struct GetCommentsQueryParams {
page: i64,
}
#[tracing::instrument(name = "Fetching comments", skip(connection_pool))]
pub async fn get_comments(
Path(CommentPathParam { post_id }): Path<CommentPathParam>,
Query(GetCommentsQueryParams { page }): Query<GetCommentsQueryParams>,
State(AppState {
connection_pool, ..
}): State<AppState>,
) -> Result<Response, AppError> {
let comments = get_comments_page_for_post(&connection_pool, post_id, page)
.await
.context("Could not fetch comments.")?;
let count = get_comments_count_for_post(&connection_pool, post_id)
.await
.context("Could not fetch comments count")?;
let max_page = get_max_page(count, COMMENTS_PER_PAGE);
let template = HtmlTemplate(CommentsList {
comments,
current_page: page,
max_page,
});
Ok(template.into_response())
}
#[tracing::instrument(name = "Fetching all comments", skip(connection_pool))]
pub async fn get_all_comments(
Query(GetCommentsQueryParams { page }): Query<GetCommentsQueryParams>,
State(AppState {
connection_pool, ..
}): State<AppState>,
) -> Result<Response, AppError> {
let comments = get_comments_page(&connection_pool, page)
.await
.context("Could not fetch comments.")?;
let count = get_comments_count(&connection_pool)
.await
.context("Could not fetch comments count")?;
let comments_max_page = get_max_page(count, COMMENTS_PER_PAGE);
let template = HtmlTemplate(CommentsPageDashboardTemplate {
comments,
comments_current_page: page,
comments_max_page,
});
Ok(template.into_response())
}
pub async fn delete_comment(
State(AppState {
connection_pool, ..
}): State<AppState>,
crate::routes::Path(comment_id): crate::routes::Path<Uuid>,
) -> Result<Response, AppError> {
let res = sqlx::query!("DELETE FROM comments WHERE comment_id = $1", comment_id)
.execute(&connection_pool)
.await
.context("Failed to delete comment from database.")
.map_err(AppError::unexpected_message)?;
if res.rows_affected() == 0 {
Err(AppError::unexpected_message(anyhow::anyhow!(
"We could not find the comment in the database."
)))
} else {
let template = MessageTemplate::success("The comment has been deleted.".into());
Ok(template.render().unwrap().into_response())
}
}
pub async fn get_comments_page_for_post(
connection_pool: &PgPool,
post_id: Uuid,
page: i64,
) -> Result<Vec<CommentEntry>, sqlx::Error> {
let offset = (page - 1) * COMMENTS_PER_PAGE;
let mut comments = sqlx::query_as!(
CommentEntry,
r#"
SELECT c.user_id as "user_id?", u.username as "username?", c.comment_id, c.post_id, c.author, c.content, c.published_at
FROM comments c
LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL
WHERE c.post_id = $1
ORDER BY c.published_at DESC
LIMIT $2
OFFSET $3
"#,
post_id,
COMMENTS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
for comment in comments.iter_mut() {
if let Some(user_id) = comment.user_id {
let record = sqlx::query!(
"SELECT username, full_name FROM users WHERE user_id = $1",
user_id
)
.fetch_one(connection_pool)
.await?;
let author = record.full_name.unwrap_or(record.username);
comment.author = Some(author);
}
}
Ok(comments)
}
pub async fn get_comments_count_for_post(
connection_pool: &PgPool,
post_id: Uuid,
) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM comments WHERE post_id = $1", post_id)
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}
pub async fn get_comments_page(
connection_pool: &PgPool,
page: i64,
) -> Result<Vec<CommentEntry>, sqlx::Error> {
let offset = (page - 1) * COMMENTS_PER_PAGE;
let comments = sqlx::query_as!(
CommentEntry,
r#"
SELECT c.user_id as "user_id?", u.username as "username?", c.comment_id, c.post_id, c.author, c.content, c.published_at
FROM comments c
LEFT JOIN users u ON c.user_id = u.user_id AND c.user_id IS NOT NULL
ORDER BY published_at DESC
LIMIT $1
OFFSET $2
"#,
COMMENTS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await?;
Ok(comments)
}
pub async fn get_comments_count(connection_pool: &PgPool) -> Result<i64, sqlx::Error> {
let count = sqlx::query_scalar!("SELECT count(*) FROM comments")
.fetch_one(connection_pool)
.await?
.unwrap_or(0);
Ok(count)
}

View File

@@ -1,5 +1,5 @@
-use axum::http::StatusCode;
+use axum::{http::StatusCode, response::IntoResponse};

-pub async fn health_check() -> StatusCode {
+pub async fn health_check() -> impl IntoResponse {
     StatusCode::OK
 }

View File

@@ -1,7 +1,8 @@
-use crate::templates::{HomeTemplate, HtmlTemplate};
-use axum::response::{IntoResponse, Response};
+use askama::Template;
+use axum::response::Html;
+
+use crate::templates::HomeTemplate;

-pub async fn home() -> Response {
-    let template = HtmlTemplate(HomeTemplate);
-    template.into_response()
+pub async fn home() -> Html<String> {
+    Html(HomeTemplate.render().unwrap())
 }

View File

@@ -15,8 +15,6 @@ use axum::{
 };
 use axum::{http::StatusCode, response::Redirect};
 use secrecy::SecretString;
-use sqlx::PgPool;
-use uuid::Uuid;

 #[derive(serde::Deserialize)]
 pub struct LoginFormData {
@@ -31,13 +29,12 @@ pub async fn get_login(session: TypedSession) -> Result<Response, AppError> {
         .context("Failed to retrieve user id from data store.")?
         .is_some()
     {
-        Ok(Redirect::to("dashboard").into_response())
+        Ok(Redirect::to("/admin/dashboard").into_response())
     } else {
         Ok(Html(LoginTemplate.render().unwrap()).into_response())
     }
 }

-#[tracing::instrument(name = "Authenticating user", skip_all, fields(name = %form.username))]
 pub async fn post_login(
     session: TypedSession,
     State(AppState {
@@ -50,11 +47,8 @@ pub async fn post_login(
         password: form.password,
     };
     tracing::Span::current().record("username", tracing::field::display(&credentials.username));
-    let (user_id, role) = validate_credentials(credentials, &connection_pool).await?;
+    let user_id = validate_credentials(credentials, &connection_pool).await?;
     tracing::Span::current().record("user_id", tracing::field::display(&user_id));
-    record_login(&connection_pool, &user_id)
-        .await
-        .context("Failed to register new login event.")?;

     session.renew().await.context("Failed to renew session.")?;
     session
@@ -65,20 +59,8 @@ pub async fn post_login(
         .insert_username(form.username)
         .await
         .context("Failed to insert username in session data store.")?;
-    session
-        .insert_role(role)
-        .await
-        .context("Failed to insert role in session data store.")?;

     let mut headers = HeaderMap::new();
-    headers.insert("HX-Redirect", "/dashboard".parse().unwrap());
+    headers.insert("HX-Redirect", "/admin/dashboard".parse().unwrap());
     Ok((StatusCode::OK, headers).into_response())
 }
-
-#[tracing::instrument(name = "Recording new login event", skip_all, fields(user_id = %user_id))]
-async fn record_login(connection_pool: &PgPool, user_id: &Uuid) -> Result<(), sqlx::Error> {
-    sqlx::query!("INSERT INTO user_logins (user_id) VALUES ($1)", user_id)
-        .execute(connection_pool)
-        .await?;
-    Ok(())
-}

View File

@@ -1,308 +1,74 @@
use crate::authentication::AuthenticatedUser;
use crate::routes::{COMMENTS_PER_PAGE, Query, get_max_page, join_error_messages};
use crate::session_state::TypedSession;
use crate::templates::{ErrorTemplate, MessageTemplate, PostsPageDashboardTemplate};
use crate::{ use crate::{
domain::PostEntry, domain::PostEntry,
routes::{ routes::AppError,
AppError, Path, get_comments_count_for_post, get_comments_page_for_post, not_found_html,
},
startup::AppState, startup::AppState,
templates::{HtmlTemplate, PostListTemplate, PostTemplate, PostsTemplate}, templates::{PostTemplate, PostsTemplate},
}; };
use anyhow::Context; use anyhow::Context;
use askama::Template; use askama::Template;
use axum::{ use axum::{
Extension, Form, extract::{Path, State},
extract::State, response::{Html, IntoResponse, Response},
response::{Html, IntoResponse, Redirect, Response},
}; };
use chrono::Utc;
use sqlx::PgPool; use sqlx::PgPool;
use uuid::Uuid; use uuid::Uuid;
use validator::Validate;
pub const POSTS_PER_PAGE: i64 = 3;
#[tracing::instrument(name = "Fetching most recent posts from database", skip_all)]
pub async fn list_posts( pub async fn list_posts(
State(AppState { State(AppState {
connection_pool, .. connection_pool, ..
}): State<AppState>, }): State<AppState>,
) -> Result<Response, AppError> { ) -> Result<Response, AppError> {
let count = get_posts_count(&connection_pool) let posts = get_latest_posts(&connection_pool, 5)
.await
.context("Could not fetch posts table size.")
.map_err(AppError::unexpected_page)?;
let next_page = if count > POSTS_PER_PAGE {
Some(2)
} else {
None
};
let posts = get_posts(&connection_pool, POSTS_PER_PAGE, None)
.await .await
.context("Could not fetch latest posts") .context("Could not fetch latest posts")
.map_err(AppError::unexpected_page)?; .map_err(AppError::unexpected_page)?;
let template = PostsTemplate { posts, next_page }; let template = PostsTemplate { posts };
Ok(Html(template.render().unwrap()).into_response()) Ok(Html(template.render().unwrap()).into_response())
} }
#[tracing::instrument(name = "Fetching next posts from database", skip_all)] async fn get_latest_posts(connection_pool: &PgPool, n: i64) -> Result<Vec<PostEntry>, sqlx::Error> {
pub async fn get_posts_page_dashboard(
State(AppState {
connection_pool, ..
}): State<AppState>,
Query(LoadMoreParams { page }): Query<LoadMoreParams>,
) -> Result<Response, AppError> {
let posts = get_posts_page(&connection_pool, page)
.await
.context("Could not fetch next posts page.")?;
let posts_current_page = page;
let count = get_posts_count(&connection_pool)
.await
.context("Could not fetch number of posts.")?;
let posts_max_page = get_max_page(count, POSTS_PER_PAGE);
let template = HtmlTemplate(PostsPageDashboardTemplate {
posts,
posts_current_page,
posts_max_page,
});
Ok(template.into_response())
}
async fn get_posts(
connection_pool: &PgPool,
n: i64,
offset: Option<i64>,
) -> Result<Vec<PostEntry>, sqlx::Error> {
sqlx::query_as!( sqlx::query_as!(
PostEntry, PostEntry,
r#" r#"
SELECT p.post_id, p.author_id, u.username AS author, u.full_name, SELECT p.post_id, u.username AS author, p.title, p.content, p.published_at
p.title, p.content, p.published_at, p.last_modified
FROM posts p FROM posts p
LEFT JOIN users u ON p.author_id = u.user_id LEFT JOIN users u ON p.author_id = u.user_id
ORDER BY p.published_at DESC ORDER BY p.published_at DESC
LIMIT $1 LIMIT $1
OFFSET $2
"#, "#,
n, n
offset
) )
.fetch_all(connection_pool) .fetch_all(connection_pool)
.await .await
} }
pub async fn get_posts_page(
connection_pool: &PgPool,
page: i64,
) -> Result<Vec<PostEntry>, sqlx::Error> {
let offset = (page - 1) * POSTS_PER_PAGE;
sqlx::query_as!(
PostEntry,
r#"
SELECT p.post_id, p.author_id, u.username AS author, u.full_name,
p.title, p.content, p.published_at, p.last_modified
FROM posts p
LEFT JOIN users u ON p.author_id = u.user_id
ORDER BY p.published_at DESC
LIMIT $1
OFFSET $2
"#,
POSTS_PER_PAGE,
offset
)
.fetch_all(connection_pool)
.await
}
pub async fn get_posts_count(connection_pool: &PgPool) -> Result<i64, sqlx::Error> {
sqlx::query!("SELECT count(*) FROM posts")
.fetch_one(connection_pool)
.await
.map(|r| r.count.unwrap())
}
#[derive(Validate, serde::Deserialize)]
pub struct EditPostForm {
#[validate(length(min = 1, message = "Title must be at least one character."))]
pub title: String,
#[validate(length(min = 1, message = "Content must be at least one character."))]
pub content: String,
}
#[tracing::instrument(name = "Editing post", skip_all, fields(post_id = %post_id))]
pub async fn update_post(
State(AppState {
connection_pool, ..
}): State<AppState>,
Extension(AuthenticatedUser { user_id, .. }): Extension<AuthenticatedUser>,
Path(post_id): Path<Uuid>,
Form(form): Form<EditPostForm>,
) -> Result<Response, AppError> {
let record = sqlx::query!("SELECT author_id FROM posts WHERE post_id = $1", post_id)
.fetch_optional(&connection_pool)
.await
.context("Could not fetch post author.")?;
match record {
None => Ok(HtmlTemplate(ErrorTemplate::NotFound).into_response()),
Some(record) if record.author_id == user_id => {
if let Err(e) = form.validate().map_err(join_error_messages) {
let template = HtmlTemplate(MessageTemplate::error(e));
return Ok(template.into_response());
}
sqlx::query!(
"
UPDATE posts
SET title = $1, content = $2, last_modified = $3 WHERE post_id = $4
",
form.title,
form.content,
Utc::now(),
post_id
)
.execute(&connection_pool)
.await
.context("Could not update post")?;
Ok(HtmlTemplate(MessageTemplate::success(
"Your changes have been saved.".into(),
))
.into_response())
}
_ => Ok(HtmlTemplate(MessageTemplate::error(
"You are not authorized. Only the author can edit his post.".into(),
))
.into_response()),
}
}
#[derive(serde::Deserialize)]
pub struct OriginQueryParam {
origin: Option<Uuid>,
}
#[tracing::instrument(
name = "Fetching post from database",
skip(connection_pool, origin, session)
)]
pub async fn see_post( pub async fn see_post(
session: TypedSession,
State(AppState { State(AppState {
connection_pool, .. connection_pool, ..
}): State<AppState>, }): State<AppState>,
Path(post_id): Path<Uuid>, Path(post_id): Path<Uuid>,
Query(OriginQueryParam { origin }): Query<OriginQueryParam>,
) -> Result<Response, AppError> { ) -> Result<Response, AppError> {
if let Some(origin) = origin { let post = get_post(&connection_pool, post_id)
mark_email_as_opened(&connection_pool, origin).await?;
return Ok(Redirect::to(&format!("/posts/{}", post_id)).into_response());
}
if let Some(post) = get_post_data(&connection_pool, post_id)
.await .await
.context(format!("Failed to fetch post #{}.", post_id)) .context(format!("Failed to fetch post #{}", post_id))
.map_err(AppError::unexpected_page)? .map_err(AppError::unexpected_page)?
{ .to_html()
let post_html = post .context("Could not render markdown with extension.")?;
.to_html() let template = PostTemplate { post };
.context("Could not render markdown with extension.")?; Ok(Html(template.render().unwrap()).into_response())
let current_page = 1;
let comments_count = get_comments_count_for_post(&connection_pool, post_id)
.await
.context("Could not fetch comment count.")?;
let max_page = get_max_page(comments_count, COMMENTS_PER_PAGE);
let comments = get_comments_page_for_post(&connection_pool, post_id, 1)
.await
.context("Failed to fetch latest comments.")?;
let idempotency_key = Uuid::new_v4().to_string();
let session_user_id = session
.get_user_id()
.await
.context("Could not check for session user id.")?;
let session_username = session
.get_username()
.await
.context("Could not check for session username.")?;
let template = HtmlTemplate(PostTemplate {
post,
post_html,
comments,
idempotency_key,
current_page,
max_page,
comments_count,
session_user_id,
session_username,
});
Ok(template.into_response())
} else {
Ok(not_found_html())
}
} }
#[tracing::instrument(name = "Mark email notification as opened", skip(connection_pool))] async fn get_post(connection_pool: &PgPool, post_id: Uuid) -> Result<PostEntry, sqlx::Error> {
async fn mark_email_as_opened(connection_pool: &PgPool, email_id: Uuid) -> Result<(), AppError> {
sqlx::query!(
"UPDATE notifications_delivered SET opened = TRUE WHERE email_id = $1",
email_id,
)
.execute(connection_pool)
.await
.context("Failed to mark email as opened.")
.map_err(AppError::unexpected_page)?;
Ok(())
}
async fn get_post_data(
connection_pool: &PgPool,
post_id: Uuid,
) -> Result<Option<PostEntry>, sqlx::Error> {
sqlx::query_as!( sqlx::query_as!(
PostEntry, PostEntry,
r#" r#"
SELECT p.post_id, p.author_id, u.username AS author, u.full_name, SELECT p.post_id, u.username AS author, p.title, p.content, p.published_at
p.title, p.content, p.published_at, last_modified
FROM posts p FROM posts p
LEFT JOIN users u ON p.author_id = u.user_id LEFT JOIN users u ON p.author_id = u.user_id
WHERE p.post_id = $1 WHERE p.post_id = $1
"#, "#,
post_id post_id
) )
.fetch_optional(connection_pool) .fetch_one(connection_pool)
.await .await
} }
#[derive(serde::Deserialize)]
pub struct LoadMoreParams {
page: i64,
}
#[tracing::instrument(name = "Fetching next posts in the database", skip(connection_pool))]
pub async fn load_more(
State(AppState {
connection_pool, ..
}): State<AppState>,
Query(LoadMoreParams { page }): Query<LoadMoreParams>,
) -> Result<Response, AppError> {
let posts = get_posts_page(&connection_pool, page)
.await
.context("Could not fetch posts from database.")?;
let count = get_posts_count(&connection_pool)
.await
.context("Could not fetch posts count.")?;
let max_page = get_max_page(count, POSTS_PER_PAGE);
Ok(Html(
PostListTemplate {
posts,
next_page: if page < max_page {
Some(page + 1)
} else {
None
},
}
.render()
.unwrap(),
)
.into_response())
}

View File

@@ -19,9 +19,9 @@ use uuid::Uuid;
 #[tracing::instrument(
     name = "Adding a new subscriber",
-    skip_all,
+    skip(connection_pool, email_client, base_url, form),
     fields(
-        email = %form.email,
+        subscriber_email = %form.email,
     )
 )]
 pub async fn subscribe(
@@ -66,12 +66,16 @@ pub async fn subscribe(
             .context("Failed to commit the database transaction to store a new subscriber.")?;
     }

-    let template =
-        MessageTemplate::success("You'll receive a confirmation email shortly.".to_string());
+    let template = MessageTemplate::Success {
+        message: "You'll receive a confirmation email shortly.".to_string(),
+    };
     Ok(Html(template.render().unwrap()).into_response())
 }

-#[tracing::instrument(name = "Saving new subscriber details in the database", skip_all)]
+#[tracing::instrument(
+    name = "Saving new subscriber details in the database",
+    skip(transaction, new_subscriber)
+)]
 pub async fn insert_subscriber(
     transaction: &mut Transaction<'_, Postgres>,
     new_subscriber: &NewSubscriber,
@@ -119,7 +123,10 @@ async fn store_token(
     Ok(())
 }

-#[tracing::instrument(name = "Send confirmation email to the new subscriber", skip_all)]
+#[tracing::instrument(
+    name = "Send a confirmation email to a new subscriber",
+    skip(email_client, new_subscriber, base_url, subscription_token)
+)]
 pub async fn send_confirmation_email(
     email_client: &EmailClient,
     new_subscriber: &NewSubscriber,

View File

@@ -1,39 +1,44 @@
use crate::{ use crate::{routes::generate_token, startup::AppState, templates::ConfirmTemplate};
routes::{AppError, Query, generate_token, not_found_html}, use askama::Template;
startup::AppState,
templates::{ConfirmTemplate, HtmlTemplate},
};
use anyhow::Context;
use axum::{ use axum::{
extract::State, extract::{Query, State},
response::{IntoResponse, Response}, http::StatusCode,
response::{Html, IntoResponse, Response},
}; };
use serde::Deserialize; use serde::Deserialize;
use sqlx::PgPool; use sqlx::PgPool;
use uuid::Uuid; use uuid::Uuid;
#[tracing::instrument(name = "Confirming new subscriber", skip_all)] #[tracing::instrument(name = "Confirming new subscriber", skip(params))]
pub async fn confirm( pub async fn confirm(
State(AppState { State(AppState {
connection_pool, .. connection_pool, ..
}): State<AppState>, }): State<AppState>,
Query(params): Query<Params>, Query(params): Query<Params>,
) -> Result<Response, AppError> { ) -> Response {
let subscriber_id = get_subscriber_id_from_token(&connection_pool, &params.subscription_token) let Ok(subscriber_id) =
.await get_subscriber_id_from_token(&connection_pool, &params.subscription_token).await
.context("Could not fetch subscriber id given subscription token.")?; else {
if let Some(id) = subscriber_id { return StatusCode::INTERNAL_SERVER_ERROR.into_response();
confirm_subscriber(&connection_pool, &id) };
if let Some(subscriber_id) = subscriber_id {
if confirm_subscriber(&connection_pool, &subscriber_id)
.await .await
.context("Failed to update subscriber status.")?; .is_err()
let template = HtmlTemplate(ConfirmTemplate); {
Ok(template.into_response()) StatusCode::INTERNAL_SERVER_ERROR.into_response()
} else {
Html(ConfirmTemplate.render().unwrap()).into_response()
}
} else { } else {
Ok(not_found_html()) StatusCode::UNAUTHORIZED.into_response()
} }
} }
#[tracing::instrument(name = "Mark subscriber as confirmed", skip(connection_pool))] #[tracing::instrument(
name = "Mark subscriber as confirmed",
skip(connection_pool, subscriber_id)
)]
async fn confirm_subscriber( async fn confirm_subscriber(
connection_pool: &PgPool, connection_pool: &PgPool,
subscriber_id: &Uuid, subscriber_id: &Uuid,
@@ -44,11 +49,18 @@ async fn confirm_subscriber(
subscriber_id subscriber_id
) )
.execute(connection_pool) .execute(connection_pool)
.await?; .await
.map_err(|e| {
tracing::error!("Failed to execute query: {:?}", e);
e
})?;
Ok(()) Ok(())
} }
#[tracing::instrument(name = "Get subscriber id from token", skip(connection))] #[tracing::instrument(
name = "Get subscriber_id from token",
skip(connection, subscription_token)
)]
async fn get_subscriber_id_from_token( async fn get_subscriber_id_from_token(
connection: &PgPool, connection: &PgPool,
subscription_token: &str, subscription_token: &str,
@@ -58,7 +70,11 @@ async fn get_subscriber_id_from_token(
subscription_token subscription_token
) )
.fetch_optional(connection) .fetch_optional(connection)
.await?; .await
.map_err(|e| {
tracing::error!("Failed to execute query: {:?}", e);
e
})?;
Ok(saved.map(|r| r.subscriber_id)) Ok(saved.map(|r| r.subscriber_id))
} }

View File

@@ -1,9 +1,11 @@
use crate::{ use crate::{
domain::SubscriberEmail, domain::SubscriberEmail,
email_client::EmailClient, email_client::EmailClient,
routes::{AppError, not_found_html}, routes::AppError,
startup::AppState, startup::AppState,
templates::{MessageTemplate, UnsubscribeConfirmTemplate, UnsubscribeTemplate}, templates::{
MessageTemplate, NotFoundTemplate, UnsubscribeConfirmTemplate, UnsubscribeTemplate,
},
}; };
use anyhow::Context; use anyhow::Context;
use askama::Template; use askama::Template;
@@ -12,6 +14,7 @@ use axum::{
extract::{Query, State}, extract::{Query, State},
response::{Html, IntoResponse, Response}, response::{Html, IntoResponse, Response},
}; };
use reqwest::StatusCode;
use sqlx::{Executor, PgPool}; use sqlx::{Executor, PgPool};
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
@@ -28,10 +31,6 @@ pub struct UnsubFormData {
email: String, email: String,
} }
#[tracing::instrument(
name = "Removing subscriber from database",
skip(connection_pool, email_client, base_url)
)]
pub async fn post_unsubscribe( pub async fn post_unsubscribe(
State(AppState { State(AppState {
connection_pool, connection_pool,
@@ -49,13 +48,13 @@ pub async fn post_unsubscribe(
.await .await
.context("Failed to send a confirmation email.")?; .context("Failed to send a confirmation email.")?;
} }
let template = MessageTemplate::success( let template = MessageTemplate::Success {
"If you are a subscriber, you'll receive a confirmation link shortly.".into(), message: "If you are a subscriber, you'll receive a confirmation link shortly.".into(),
); };
Ok(Html(template.render().unwrap()).into_response()) Ok(Html(template.render().unwrap()).into_response())
} }
#[tracing::instrument(name = "Fetching unsubscribe token from database", skip_all)] #[tracing::instrument(name = "Fetching unsubscribe token from the database", skip_all)]
async fn fetch_unsubscribe_token( async fn fetch_unsubscribe_token(
connection_pool: &PgPool, connection_pool: &PgPool,
subscriber_email: &SubscriberEmail, subscriber_email: &SubscriberEmail,
@@ -70,7 +69,7 @@ async fn fetch_unsubscribe_token(
Ok(r.and_then(|r| r.unsubscribe_token)) Ok(r.and_then(|r| r.unsubscribe_token))
} }
#[tracing::instrument(name = "Send an confirmation email", skip_all)] #[tracing::instrument(name = "Send an unsubscribe confirmation email", skip_all)]
pub async fn send_unsubscribe_email( pub async fn send_unsubscribe_email(
email_client: &EmailClient, email_client: &EmailClient,
subscriber_email: &SubscriberEmail, subscriber_email: &SubscriberEmail,
@@ -82,15 +81,15 @@ pub async fn send_unsubscribe_email(
base_url, unsubscribe_token base_url, unsubscribe_token
); );
let html_content = format!( let html_content = format!(
r#"You've requested to unsubscribe from the newsletter. To confirm, please click the link below:<br /> "You've requested to unsubscribe from my emails. To confirm, please click the link below:<br />\
<a href="{}">Confirm unsubscribe</a><br /> <a href=\"{}\">Confirm unsubscribe</a><br />\
If you did not request this, you can safely ignore this email."#, If you did not request this, you can safely ignore this email.",
confirmation_link confirmation_link
); );
let text_content = format!( let text_content = format!(
r#"You've requested to unsubscribe from the newsletter. To confirm, please follow the link below: "You've requested to unsubscribe from my emails. To confirm, please follow the link below:\
{} {}\
If you did not request this, you can safely ignore this email."#, If you did not request this, you can safely ignore this email.",
confirmation_link confirmation_link
); );
email_client email_client
@@ -103,7 +102,7 @@ If you did not request this, you can safely ignore this email."#,
.await .await
} }
#[tracing::instrument(name = "Removing user from database", skip(connection_pool))] #[tracing::instrument(name = "Removing user from database if he exists", skip_all)]
pub async fn unsubscribe_confirm( pub async fn unsubscribe_confirm(
Query(UnsubQueryParams { token }): Query<UnsubQueryParams>, Query(UnsubQueryParams { token }): Query<UnsubQueryParams>,
State(AppState { State(AppState {
@@ -121,7 +120,11 @@ pub async fn unsubscribe_confirm(
if result.rows_affected() == 0 { if result.rows_affected() == 0 {
tracing::info!("Unsubscribe token is not tied to any confirmed user"); tracing::info!("Unsubscribe token is not tied to any confirmed user");
Ok(not_found_html()) Ok((
StatusCode::NOT_FOUND,
Html(NotFoundTemplate.render().unwrap()),
)
.into_response())
} else { } else {
tracing::info!("User successfully removed"); tracing::info!("User successfully removed");
Ok(Html(UnsubscribeConfirmTemplate.render().unwrap()).into_response()) Ok(Html(UnsubscribeConfirmTemplate.render().unwrap()).into_response())

View File

@@ -1,336 +0,0 @@
use crate::authentication::AuthenticatedUser;
use crate::routes::{join_error_messages, verify_password};
use crate::session_state::TypedSession;
use crate::templates::{MessageTemplate, UserEditTemplate};
use crate::{
authentication::Role,
domain::{PostEntry, UserEntry},
routes::{AppError, not_found_html},
startup::AppState,
templates::{HtmlTemplate, UserTemplate},
};
use anyhow::Context;
use axum::{
Extension, Form,
extract::{Path, State},
response::{IntoResponse, Response},
};
use secrecy::{ExposeSecret, SecretString};
use sqlx::PgPool;
use uuid::Uuid;
use validator::Validate;
pub async fn user_edit_form(
Extension(AuthenticatedUser { user_id, .. }): Extension<AuthenticatedUser>,
State(AppState {
connection_pool, ..
}): State<AppState>,
) -> Result<Response, AppError> {
let user = sqlx::query_as!(
UserEntry,
r#"
SELECT user_id, username, role as "role: Role", full_name, bio, member_since
FROM users
WHERE user_id = $1
"#,
user_id
)
.fetch_one(&connection_pool)
.await
.context("Could not fetch user in database.")?;
let template = HtmlTemplate(UserEditTemplate { user });
Ok(template.into_response())
}
#[derive(Debug, Validate, serde::Deserialize)]
pub struct EditProfileForm {
user_id: Uuid,
#[validate(length(min = 3, message = "Username must be at least 3 characters."))]
username: String,
full_name: String,
bio: String,
}
#[tracing::instrument(name = "Updating user profile", skip_all, fields(user_id = %form.user_id))]
pub async fn update_user(
State(AppState {
connection_pool, ..
}): State<AppState>,
session: TypedSession,
Extension(AuthenticatedUser {
user_id: session_user_id,
username: session_username,
..
}): Extension<AuthenticatedUser>,
Form(form): Form<EditProfileForm>,
) -> Result<Response, AppError> {
if let Err(e) = form.validate().map_err(join_error_messages) {
let template = HtmlTemplate(MessageTemplate::error(e));
return Ok(template.into_response());
}
if form.user_id != session_user_id {
let template = HtmlTemplate(MessageTemplate::error(
"You are not authorized. Refresh the page and try again.".into(),
));
return Ok(template.into_response());
}
let updated_username = form.username.trim();
if updated_username != session_username
&& sqlx::query!(
"SELECT user_id FROM users WHERE username = $1",
updated_username
)
.fetch_optional(&connection_pool)
.await
.context("Could not fetch users table.")?
.is_some()
{
let template = HtmlTemplate(MessageTemplate::error(
"This username is already taken.".into(),
));
return Ok(template.into_response());
}
let updated_full_name = form.full_name.trim();
let bio = {
let bio = form.bio.trim();
if bio.is_empty() { None } else { Some(bio) }
};
sqlx::query!(
"
UPDATE users
SET username = $1, full_name = $2, bio = $3
WHERE user_id = $4
",
updated_username,
updated_full_name,
bio,
form.user_id
)
.execute(&connection_pool)
.await
.context("Failed to apply changes.")
.map_err(AppError::FormError)?;
session
.insert_username(updated_username.to_owned())
.await
.context("Could not update session username.")?;
let template = HtmlTemplate(MessageTemplate::success(
"Your profile has been updated.".into(),
));
Ok(template.into_response())
}
#[tracing::instrument(name = "Get users from database", skip(connection_pool))]
pub async fn get_users(connection_pool: &PgPool) -> Result<Vec<UserEntry>, sqlx::Error> {
sqlx::query_as!(
UserEntry,
r#"
SELECT user_id, username, role as "role: Role", full_name, bio, member_since
FROM users
ORDER BY member_since DESC
"#
)
.fetch_all(connection_pool)
.await
}
#[derive(Debug, serde::Deserialize)]
pub struct CreateUserForm {
username: String,
password: SecretString,
password_check: SecretString,
admin: Option<bool>,
}
struct NewUser {
username: String,
password_hash: SecretString,
role: Role,
}
impl TryFrom<CreateUserForm> for NewUser {
type Error = anyhow::Error;
fn try_from(value: CreateUserForm) -> Result<Self, Self::Error> {
if value.username.trim().is_empty() {
anyhow::bail!("Username cannot be empty.");
}
verify_password(value.password.expose_secret())?;
if value.password.expose_secret() != value.password_check.expose_secret() {
anyhow::bail!("Password mismatch.");
}
let role = match value.admin {
Some(true) => Role::Admin,
_ => Role::Writer,
};
let password_hash = crate::authentication::compute_pasword_hash(value.password)
.context("Failed to hash password.")?;
Ok(Self {
username: value.username,
password_hash,
role,
})
}
}
#[tracing::instrument(name = "Creating new user", skip_all, fields(username = %form.username))]
pub async fn create_user(
State(AppState {
connection_pool, ..
}): State<AppState>,
Form(form): Form<CreateUserForm>,
) -> Result<Response, AppError> {
let new_user: NewUser = match form.try_into().map_err(|e: anyhow::Error| e.to_string()) {
Err(e) => {
let template = HtmlTemplate(MessageTemplate::error(e));
return Ok(template.into_response());
}
Ok(new_user) => new_user,
};
insert_user(&connection_pool, new_user)
.await
.context("Could not insert user in database.")?;
let template = HtmlTemplate(MessageTemplate::success(
"The new user has been created.".into(),
));
Ok(template.into_response())
}
async fn insert_user(connection_pool: &PgPool, new_user: NewUser) -> Result<Uuid, sqlx::Error> {
let user_id = Uuid::new_v4();
sqlx::query!(
r#"
INSERT INTO users (user_id, username, password_hash, role)
VALUES ($1, $2, $3, $4)
"#,
user_id,
new_user.username,
new_user.password_hash.expose_secret(),
new_user.role as _
)
.execute(connection_pool)
.await?;
Ok(user_id)
}
#[derive(serde::Deserialize)]
pub struct SubscriberPathParams {
pub user_id: Uuid,
}
#[tracing::instrument(name = "Delete user from database", skip(connection_pool))]
pub async fn delete_user(
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(SubscriberPathParams { user_id }): Path<SubscriberPathParams>,
) -> Result<Response, AppError> {
let result = sqlx::query!("DELETE FROM users WHERE user_id = $1", user_id)
.execute(&connection_pool)
.await
.context("Failed to delete user from database.")?;
let template = if result.rows_affected() == 0 {
HtmlTemplate(MessageTemplate::error(
"The user could not be deleted.".into(),
))
} else {
HtmlTemplate(MessageTemplate::success(
"The user has been deleted.".into(),
))
};
Ok(template.into_response())
}
#[tracing::instrument(name = "Fetching user data", skip(connection_pool, session))]
pub async fn user_profile(
session: TypedSession,
State(AppState {
connection_pool, ..
}): State<AppState>,
Path(username): Path<String>,
) -> Result<Response, AppError> {
match fetch_user_data(&connection_pool, &username)
.await
.context("Failed to fetch user data.")?
{
Some(user) => {
let posts = fetch_user_posts(&connection_pool, &user.user_id)
.await
.context("Could not fetch user posts.")?;
let session_user_id = session
.get_user_id()
.await
.context("Could not fetch session username.")?;
let profile_user_id =
sqlx::query!("SELECT user_id FROM users WHERE username = $1", username)
.fetch_one(&connection_pool)
.await
.context("Could not fetch profile user id.")?
.user_id;
let last_seen = sqlx::query!(
"
SELECT login_time FROM user_logins
WHERE user_id = $1
ORDER BY login_time DESC
",
profile_user_id
)
.fetch_optional(&connection_pool)
.await
.context("Failed to fetch last user login")?
.map(|r| r.login_time);
let template = HtmlTemplate(UserTemplate {
user,
session_user_id,
last_seen,
posts,
});
Ok(template.into_response())
}
None => {
tracing::error!(username = %username, "user not found");
Ok(not_found_html().into_response())
}
}
}
#[tracing::instrument(name = "Fetching user profile", skip_all)]
async fn fetch_user_data(
connection_pool: &PgPool,
username: &str,
) -> Result<Option<UserEntry>, sqlx::Error> {
sqlx::query_as!(
UserEntry,
r#"
SELECT user_id, username, full_name, role as "role: Role", member_since, bio
FROM users
WHERE username = $1
"#,
username
)
.fetch_optional(connection_pool)
.await
}
#[tracing::instrument(name = "Fetching user posts", skip_all)]
async fn fetch_user_posts(
connection_pool: &PgPool,
user_id: &Uuid,
) -> Result<Vec<PostEntry>, sqlx::Error> {
sqlx::query_as!(
PostEntry,
r#"
SELECT p.author_id, u.username as author, u.full_name,
p.post_id, p.title, p.content, p.published_at, p.last_modified
FROM posts p
INNER JOIN users u ON p.author_id = u.user_id
WHERE p.author_id = $1
ORDER BY p.published_at DESC
"#,
user_id
)
.fetch_all(connection_pool)
.await
}

View File

@@ -3,8 +3,6 @@ use std::result;
 use tower_sessions::{Session, session::Error};
 use uuid::Uuid;

-use crate::authentication::Role;
-
 pub struct TypedSession(Session);

 type Result<T> = result::Result<T, Error>;
@@ -12,7 +10,6 @@ type Result<T> = result::Result<T, Error>;
 impl TypedSession {
     const USER_ID_KEY: &'static str = "user_id";
     const USERNAME_KEY: &'static str = "username";
-    const ROLE_KEY: &'static str = "role";

     pub async fn renew(&self) -> Result<()> {
         self.0.cycle_id().await
@@ -34,23 +31,6 @@ impl TypedSession {
         self.0.get(Self::USERNAME_KEY).await
     }

-    pub async fn insert_role(&self, role: Role) -> Result<()> {
-        self.0.insert(Self::ROLE_KEY, role).await
-    }
-
-    pub async fn get_role(&self) -> Result<Option<Role>> {
-        self.0.get(Self::ROLE_KEY).await
-    }
-
-    pub async fn has_admin_permissions(&self) -> Result<bool> {
-        let role = self.0.get(Self::ROLE_KEY).await?;
-        if let Some(Role::Admin) = role {
-            Ok(true)
-        } else {
-            Ok(false)
-        }
-    }
-
     pub async fn clear(&self) {
         self.0.clear().await;
     }

Some files were not shown because too many files have changed in this diff.