Compare commits: 2023-02-19 ... master
No commits in common. "2023-02-19" and "master" have entirely different histories.
.Dockerignore — new file (8 lines)
@@ -0,0 +1,8 @@
+target/
+.vscode
+tests/
+.git
+.gitignore
+.env
+Dockerfile
+migrations/
Deleted file (25 lines):
@@ -1,25 +0,0 @@
-# On Windows
-# ```
-# cargo install -f cargo-binutils
-# rustup component add llvm-tools-preview
-# ```
-[target.x86_64-pc-windows-msvc]
-rustflags = ["-C", "link-arg=-fuse-ld=lld"]
-[target.x86_64-pc-windows-gnu]
-rustflags = ["-C", "link-arg=-fuse-ld=lld"]
-
-# On Linux:
-# - Ubuntu, `sudo apt-get install lld clang`
-# - Arch, `sudo pacman -S lld clang`
-[target.x86_64-unknown-linux-gnu]
-rustflags = ["-C", "linker=clang", "-C", "link-arg=-fuse-ld=lld"]
-
-# On MacOS, `brew install michaeleisel/zld/zld`
-# [target.x86_64-apple-darwin]
-# rustflags = ["-C", "link-arg=-fuse-ld=/usr/local/bin/zld"]
-
-[target.aarch64-apple-darwin]
-rustflags = [
-    "-C",
-    "link-arg=-fuse-ld=/opt/homebrew/Cellar/llvm/17.0.6_1/bin/ld64.lld",
-]
Deleted file (6 lines):
@@ -1,6 +0,0 @@
-.env
-target/
-tests/
-Dockerfile
-scripts/
-migrations/
.drone.yml — new file (48 lines)
@@ -0,0 +1,48 @@
+kind: pipeline
+type: docker
+name: mailApp
+
+trigger:
+  branch:
+    - master
+  event:
+    - push
+
+steps:
+  - name: postgresDBTest # Test that the service is ready to be acted upon for cargo tests
+    image: postgres:12
+    environment:
+      PGPASSWORD: password
+      DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
+    commands:
+      - sleep 35
+      - "psql -U postgres -d newsletter -h postgres"
+
+  - name: sqlxMigrate
+    image: rust:1.57
+    environment:
+      DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
+      SKIP_DOCKER:
+        from_secret: SKIP_DOCKER
+    commands:
+      - apt update && apt install -y build-essential pkg-config libssl-dev # Dependancies for sqlx
+      - cargo install --version=0.5.7 sqlx-cli --no-default-features --features postgres # Install sqlx
+      - sqlx database create
+      - sqlx migrate run
+
+  - name: test
+    image: rust:1.57
+    environment:
+      APP_ENVIRONMENT: drone
+    commands:
+      - apt update && apt install -y build-essential pkg-config libssl-dev # Dependancies for tarpaulin
+      - cargo install cargo-tarpaulin
+      - cargo tarpaulin -v --all-features --timeout 120 --color always # RUN THOSE TESTS
+
+services:
+  - name: postgres
+    image: postgres:12
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: password
+      POSTGRES_DB: newsletter
.gitignore — vendored, 10 lines changed
@@ -1,11 +1,3 @@
 /target
 .vscode
-#.env
-.gitlab-ci-local
-.DS_Store
-
-# Added by cargo
-#
-# already existing elements were commented out
-
-#/target
+.env
Deleted file (1 line):
@@ -1 +0,0 @@
-PRIVILEGED=true
Modified file:
@@ -1,57 +1,30 @@
 image: "rust:latest"
 
-services:
-  - postgres:latest
-
-variables:
-  POSTGRES_DB: newsletter
-  POSTGRES_USER: postgres
-  POSTGRES_PASSWORD: password
-  POSTGRES_HOST: postgres
-  DB_PORT: 5432
-  DATABASE_URL: "postgres://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$DB_PORT/$POSTGRES_DB"
-  APP_DATABASE__HOST: $POSTGRES_HOST
-
-cache: # Caches build artifacts so we don't build from scratch in both build and test
-  key: ${CI_COMMIT_REF_SLUG}
-  paths:
-    - .cargo/bin
-    - .cargo/registry/index
-    - .cargo/registry/cache
-    - target/debug/deps
-    - target/debug/build
-  policy: pull-push
-
 default:
   before_script:
-    - export CARGO_HOME="$CI_PROJECT_DIR/.cargo"
-    - export PATH="$CARGO_HOME/bin:$PATH"
     - rustc --version
     - cargo --version
-    - apt update -yq && apt-get install -yq postgresql-client lld clang
-    - if ! [ -x "$(command -v cargo-sqlx)" ]; then cargo install --version='~0.7' sqlx-cli --no-default-features --features rustls,postgres; fi
-    - SKIP_DOCKER=true ./scripts/init_db.sh
-    # This is to ensure that the database is reachable and give it some time to initialize.
-    - until psql "dbname=$POSTGRES_DB user=$POSTGRES_USER password=$POSTGRES_PASSWORD host=postgres" -c '\l'; do sleep 3; done
 
 stages:
-  - build
   - test
 
-build:
-  stage: build
-  script:
-    - cargo build
-
 test-code:
   stage: test
   script:
     - cargo test
-    - if ! [ -x "$(command -v cargo-tarpaulin)" ]; then cargo install cargo-tarpaulin; fi
+    - cargo install cargo-tarpaulin
    - cargo tarpaulin --ignore-tests
 
 lint-code:
   stage: test
   script:
+    - rustup component add rustfmt
+    - cargo fmt -- --check
     - rustup component add clippy
     - cargo clippy -- -D warnings
+
+audit-code:
+  stage: test
+  script:
+    - cargo install cargo-audit
+    - cargo audit
Deleted file (17 lines):
@@ -1,17 +0,0 @@
-{
-  "db_name": "PostgreSQL",
-  "query": "\n INSERT INTO subscriptions (id, email, name, subscribed_at)\n VALUES ($1, $2, $3, $4)\n ",
-  "describe": {
-    "columns": [],
-    "parameters": {
-      "Left": [
-        "Uuid",
-        "Text",
-        "Text",
-        "Timestamptz"
-      ]
-    },
-    "nullable": []
-  },
-  "hash": "bcfcfebc6f5e8ffbf97d97c5a209be78b46d703924482cf8b43842705fcb7714"
-}
Cargo.lock — generated, 2344 lines changed (diff suppressed because it is too large)
Cargo.toml — 42 lines changed
@@ -1,6 +1,7 @@
 [package]
 name = "mail_app"
 version = "0.1.0"
+authors = ["NickBland <nick.bland@nickbland.dev>"]
 edition = "2021"
 
 [lib]
@@ -11,32 +12,27 @@ path = "src/main.rs"
 name = "mail_app"
 
 [dependencies]
-actix-web = "4"
+actix-web = "=4.0.0-beta.16"
 tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
-serde = { version = "1", features = ["derive"] }
-serde-aux = "4"
-config = { version = "0.13", default-features = false, features = ["yaml"] }
-uuid = { version = "1", features = ["v4"] }
-chrono = { version = "0.4.22", default-features = false, features = ["clock"] }
-tracing = { version = "0.1", features = ["log"] }
+serde = "1.0.115"
+config = { version = "0.11", default-features = false, features = ["yaml"] }
+sqlx = { version = "0.5.5", default-features = false, features = [ "runtime-actix-rustls", "macros", "postgres", "uuid", "chrono", "migrate", "offline"] }
+uuid = { version = "0.8.1", features = ["v4"] }
+chrono = "0.4.15"
+tracing = "0.1.19"
 tracing-subscriber = { version = "0.3", features = ["registry", "env-filter"] }
-tracing-bunyan-formatter = "0.3"
-tracing-actix-web = "0.7"
-tracing-log = "0.1"
+tracing-bunyan-formatter = "0.3.1"
+tracing-log = "0.1.1"
+serde-aux = "3"
+tracing-actix-web = "0.5.0-beta.7"
 secrecy = { version = "0.8", features = ["serde"] }
+unicode-segmentation = "1"
-[dependencies.sqlx]
-version = "0.7"
-default-features = false
-features = [
-    "runtime-tokio-rustls",
-    "macros",
-    "postgres",
-    "uuid",
-    "chrono",
-    "migrate",
-]
+validator = "0.14"
 
 [dev-dependencies]
 reqwest = { version = "0.11", features = ["json"] }
-once_cell = "1"
+once_cell = "1.7.2"
+claim = "0.5"
+fake = "~2.3"
+quickcheck = "0.9.2"
+quickcheck_macros = "0.9.1"
Dockerfile — 30 lines changed
@@ -1,32 +1,30 @@
-##### Chef
-FROM lukemathwalker/cargo-chef:latest-rust-1.76.0 as chef
+FROM lukemathwalker/cargo-chef:latest-rust-1.57.0 AS chef
 WORKDIR /app
-RUN apt update && apt install lld clang -y
 
-##### Planner
-FROM chef as planner
+# Create lock file for project to be used in builder
+FROM chef AS planner
 COPY . .
 RUN cargo chef prepare --recipe-path recipe.json
 
-##### Builder
-# Builder prepares project dependancies, not the application.
+# Build dependancies
 FROM chef as builder
 COPY --from=planner /app/recipe.json recipe.json
 RUN cargo chef cook --release --recipe-path recipe.json
 
 COPY . .
 ENV SQLX_OFFLINE true
-# Now build the application itself.
-RUN cargo build --release --bin mail_app
+RUN cargo build --release
 
-##### Runtime
-FROM debian:bookworm-slim as runtime
+FROM debian:bullseye-slim AS runtime
 
 WORKDIR /app
-RUN apt update && apt install -y --no-install-recommends openssl ca-certificates \
-    && apt autoremove -y \
-    && apt clean -y \
-    && rm -rf /var/lib/apt/lists/*
+# Install dependancies required
+RUN apt update -y && apt install -y --no-install-recommends openssl && apt autoremove -y && apt clean -y && rm -rf /var/lib/apt/lists/*
+# Copy the fully built binary and configuration to the image
 COPY --from=builder /app/target/release/mail_app mail_app
 COPY configuration configuration
 ENV APP_ENVIRONMENT production
 
 ENTRYPOINT ["./mail_app"]
configuration/drone.yaml — new file (5 lines)
@@ -0,0 +1,5 @@
+application:
+  host: 0.0.0.0
+database:
+  host: "postgres"
+  require_ssl: false
Modified file:
@@ -1,6 +1,5 @@
--- migrations/{timestamp}_create_subscriptions_table.sql
 -- Create Subscriptions Table
-CREATE TABLE subscriptions(
+CREATE TABLE Subscriptions(
     id uuid NOT NULL,
     PRIMARY KEY (id),
     email TEXT NOT NULL UNIQUE,
scripts/init_db.sh — 53 lines changed (Executable file → Normal file)
@@ -3,49 +3,50 @@ set -x
 set -eo pipefail
 
 if ! [ -x "$(command -v psql)" ]; then
-    echo >&2 "Error: psql is not installed."
+    echo >&2 "Error: `psql` is not installed."
+    echo >&2 "Use:"
+    echo >&2 " sudo apt update && sudo apt install postgresql-client"
+    echo >&2 "to install it."
     exit 1
 fi
 
 if ! [ -x "$(command -v sqlx)" ]; then
-    echo >&2 "Error: sqlx is not installed."
+    echo >&2 "Error: `sqlx` is not installed."
     echo >&2 "Use:"
-    echo >&2 " cargo install --version="~0.6" sqlx-cli"
-    echo >&2 " --no-default-features --feature rustls,postgres"
-    echo >&2 "to install."
+    echo >&2 " sudo apt install build-essential pkg-config libssl-dev"
+    echo >&2 " cargo install --version=0.5.7 sqlx-cli --no-default-features --features postgres"
+    echo >&2 "to install it."
     exit 1
 fi
 
-DB_USER="${POSTGRES_USER:=postgres}"
+DB_USER=${POSTGRES_USER:=postgres}
 DB_PASSWORD="${POSTGRES_PASSWORD:=password}"
 DB_NAME="${POSTGRES_DB:=newsletter}"
 DB_PORT="${POSTGRES_PORT:=5432}"
-DB_HOST="${POSTGRES_HOST:=localhost}"
 
+# Allow to skip Docker installation if Postgres is already running
 if [[ -z "${SKIP_DOCKER}" ]]
 then
     docker run \
+        --name postgres-db \
         -e POSTGRES_USER=${DB_USER} \
        -e POSTGRES_PASSWORD=${DB_PASSWORD} \
        -e POSTGRES_DB=${DB_NAME} \
        -p "${DB_PORT}":5432 \
-        --name "mailAppDB" \
-        -d postgres:alpine \
+        -d postgres \
        postgres -N 1000
 fi
 
-# Ping until ready to accept commands
 export PGPASSWORD="${DB_PASSWORD}"
-until psql -h "${DB_HOST}" -U "${DB_USER}" -p "${DB_PORT}" -d "postgres" -c '\q'; do
+until psql -h "localhost" -U "${DB_USER}" -p "${DB_PORT}" -d "postgres" -c '\q'; do
     >&2 echo "Postgres is still unavailable - sleeping"
     sleep 1
 done
 
->&2 echo "Postgres is running on port ${DB_PORT}, and ready to accept commands!"
+>&2 echo "Postgres is up and running on port ${DB_PORT} - running migrations."
 
-DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}
-export DATABASE_URL
+export DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@localhost:${DB_PORT}/${DB_NAME}
 sqlx database create
 sqlx migrate run
 
->&2 echo "Postgres has been migrated, ready for queries!"
+>&2 echo "Postgres has been migrated. Jobs Complete."
Deleted file (14 lines):
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-set -x
-set -eo pipefail
-
-DB_USER="${POSTGRES_USER:=postgres}"
-DB_PASSWORD="${POSTGRES_PASSWORD:=password}"
-DB_NAME="${POSTGRES_DB:=newsletter}"
-DB_PORT="${POSTGRES_PORT:=5432}"
-DB_HOST="${POSTGRES_HOST:=localhost}"
-
-for dbname in $(psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -c "copy (select datname from pg_database where datname like '%-%-%-%-%') to stdout") ; do
-    echo "dropping database $dbname"
-    dropdb -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" "$dbname"
-done
spec.yaml — new file (41 lines)
@@ -0,0 +1,41 @@
+#! spec.yaml
+name: rust-project
+
+region: sgp1
+services:
+  - name: rust-project
+    dockerfile_path: Dockerfile
+    source_dir: .
+    github:
+      repo: NickBland/mailApp
+      branch: master
+      deploy_on_push: true
+    health_check:
+      http_path: /health_check
+    http_port: 8000
+    instance_count: 1
+    instance_size_slug: basic-xxs
+    routes:
+      - path: /
+    envs:
+      - key: APP_DATABASE__USERNAME
+        scope: RUN_TIME
+        value: ${newsletter.USERNAME}
+      - key: APP_DATABASE__PASSWORD
+        scope: RUN_TIME
+        value: ${newsletter.PASSWORD}
+      - key: APP_DATABASE__HOST
+        scope: RUN_TIME
+        value: ${newsletter.HOSTNAME}
+      - key: APP_DATABASE__PORT
+        scope: RUN_TIME
+        value: ${newsletter.PORT}
+      - key: APP_DATABASE__DATABASE_NAME
+        scope: RUN_TIME
+        value: ${newsletter.DATABASE}
+databases:
+  - engine: PG
+    name: newsletter
+    num_nodes: 1
+    size: deb-s-dev-database
+    version: "12"
sqlx-data.json — new file (18 lines)
@@ -0,0 +1,18 @@
+{
+  "db": "PostgreSQL",
+  "793f0df728d217c204123f12e4eafd6439db2d49d0cb506618ae9e780c7e0558": {
+    "query": "\n INSERT INTO subscriptions (id, email, name, subscribed_at)\n VALUES ($1, $2, $3, $4)\n ",
+    "describe": {
+      "columns": [],
+      "parameters": {
+        "Left": [
+          "Uuid",
+          "Text",
+          "Text",
+          "Timestamptz"
+        ]
+      },
+      "nullable": []
+    }
+  }
+}
Modified file:
@@ -1,6 +1,8 @@
-use secrecy::{ExposeSecret, Secret};
+use std::convert::{TryFrom, TryInto};
+
 use serde_aux::field_attributes::deserialize_number_from_string;
 use sqlx::postgres::{PgConnectOptions, PgSslMode};
+use sqlx::ConnectOptions;
 
 #[derive(serde::Deserialize)]
 pub struct Settings {
@@ -8,17 +10,6 @@ pub struct Settings {
     pub application: ApplicationSettings,
 }
 
-#[derive(serde::Deserialize)]
-pub struct DatabaseSettings {
-    pub username: String,
-    pub password: Secret<String>,
-    #[serde(deserialize_with = "deserialize_number_from_string")]
-    pub port: u16,
-    pub host: String,
-    pub database_name: String,
-    pub require_ssl: bool,
-}
-
 #[derive(serde::Deserialize)]
 pub struct ApplicationSettings {
     #[serde(deserialize_with = "deserialize_number_from_string")]
@@ -26,62 +17,15 @@ pub struct ApplicationSettings {
     pub host: String,
 }
 
-pub enum Environment {
-    Local,
-    Production,
-}
-
-impl Environment {
-    pub fn as_str(&self) -> &'static str {
-        match self {
-            Environment::Local => "local",
-            Environment::Production => "production",
-        }
-    }
-}
-
-impl TryFrom<String> for Environment {
-    type Error = String;
-
-    fn try_from(s: String) -> Result<Self, Self::Error> {
-        match s.to_lowercase().as_str() {
-            "local" => Ok(Environment::Local),
-            "production" => Ok(Environment::Production),
-            e => Err(format!(
-                "{} is not a supported environment. Use `local` or `production`",
-                e
-            )),
-        }
-    }
-}
-
-pub fn get_configuration() -> Result<Settings, config::ConfigError> {
-    let base_path = std::env::current_dir().expect("Failed to determine the current directory");
-    let configuration_directory = base_path.join("configuration");
-
-    // Detect current environment, default to LOCAL
-    let environment: Environment = std::env::var("APP_ENVIRONMENT")
-        .unwrap_or_else(|_| "local".into())
-        .try_into()
-        .expect("Failed to parse APP_ENVIRONMENT");
-    let environment_filename = format!("{}.yaml", environment.as_str());
-
-    // initialise config reader
-    let settings = config::Config::builder()
-        .add_source(config::File::from(
-            configuration_directory.join("base.yaml"),
-        ))
-        .add_source(config::File::from(
-            configuration_directory.join(environment_filename),
-        ))
-        .add_source(
-            config::Environment::with_prefix("APP")
-                .prefix_separator("_")
-                .separator("__"),
-        )
-        .build()?;
-
-    settings.try_deserialize::<Settings>()
+#[derive(serde::Deserialize)]
+pub struct DatabaseSettings {
+    pub username: String,
+    pub password: String,
+    #[serde(deserialize_with = "deserialize_number_from_string")]
+    pub port: u16,
+    pub host: String,
+    pub database_name: String,
+    pub require_ssl: bool,
 }
@@ -92,14 +36,71 @@ impl DatabaseSettings {
             PgSslMode::Prefer
         };
         PgConnectOptions::new()
-            .username(&self.username)
-            .password(self.password.expose_secret())
             .host(&self.host)
+            .username(&self.username)
+            .password(&self.password)
             .port(self.port)
             .ssl_mode(ssl_mode)
     }
 
     pub fn with_db(&self) -> PgConnectOptions {
-        self.without_db().database(&self.database_name)
+        let mut options = self.without_db().database(&self.database_name);
+        options.log_statements(tracing::log::LevelFilter::Trace);
+        options
+    }
+}
+
+pub fn get_configuration() -> Result<Settings, config::ConfigError> {
+    // Initialise configuration reader
+    let mut settings = config::Config::default();
+    let base_path = std::env::current_dir().expect("Failed to determine the current directory");
+    let configuration_directory = base_path.join("configuration");
+
+    // Read default config file
+    settings.merge(config::File::from(configuration_directory.join("base")).required(true))?;
+
+    let environment: Environment = std::env::var("APP_ENVIRONMENT")
+        .unwrap_or_else(|_| "local".into())
+        .try_into()
+        .expect("Failed to parse APP_ENVIRONMENT.");
+
+    settings.merge(
+        config::File::from(configuration_directory.join(environment.as_str())).required(true),
+    )?;
+
+    settings.merge(config::Environment::with_prefix("app").separator("__"))?;
+
+    // Try convert into Settings type
+    settings.try_into()
+}
+
+pub enum Environment {
+    Local,
+    Production,
+    Drone
+}
+
+impl Environment {
+    pub fn as_str(&self) -> &'static str {
+        match self {
+            Environment::Local => "local",
+            Environment::Production => "production",
+            Environment::Drone => "drone",
+        }
+    }
+}
+
+impl TryFrom<String> for Environment {
+    type Error = String;
+
+    fn try_from(s: String) -> Result<Self, Self::Error> {
+        match s.to_lowercase().as_str() {
+            "local" => Ok(Self::Local),
+            "production" => Ok(Self::Production),
+            "drone" => Ok(Self::Drone),
+            other => Err(format!(
+                "{} is nto a supported environment. Use either `local`, `production` or `drone`.", other
+            )),
+        }
     }
 }
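Note on the configuration hunks above: the left-hand (2023-02-19) side builds its settings with the `config` 0.13 builder API and wraps the database password in `secrecy`, while the right-hand (master) side still uses the older `config` 0.11 `merge`/`try_into` flow with a plain `String` password. A minimal sketch of how the resulting `Settings` get consumed, mirroring the `src/main.rs` hunk further down; the helper name `build_pool` is illustrative only, and the sketch assumes the 2023-02-19 side of this diff (config 0.13, sqlx 0.7):

    use mail_app::configuration::get_configuration;
    use sqlx::postgres::PgPoolOptions;

    // Sketch only: read base.yaml plus the APP_ENVIRONMENT-specific file,
    // overlaid with APP_*__* environment variables, then build a lazy pool.
    fn build_pool() -> sqlx::PgPool {
        let configuration = get_configuration().expect("Failed to read configuration");
        // connect_lazy_with defers the first connection until the pool is actually used.
        PgPoolOptions::new().connect_lazy_with(configuration.database.with_db())
    }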
src/domain/mod.rs — new file (7 lines)
@@ -0,0 +1,7 @@
+mod subscriber_name;
+mod subscriber_email;
+mod new_subscriber;
+
+pub use subscriber_name::SubscriberName;
+pub use new_subscriber::NewSubscriber;
+pub use subscriber_email::SubscriberEmail;
src/domain/new_subscriber.rs — new file (7 lines)
@@ -0,0 +1,7 @@
+use crate::domain::SubscriberName;
+use crate::domain::SubscriberEmail;
+
+pub struct NewSubscriber {
+    pub email: SubscriberEmail,
+    pub name: SubscriberName,
+}
src/domain/subscriber_email.rs — new file (61 lines)
@@ -0,0 +1,61 @@
+use validator::validate_email;
+
+#[derive(Debug)]
+pub struct SubscriberEmail(String);
+
+impl SubscriberEmail {
+    pub fn parse(s: String) -> Result<SubscriberEmail, String> {
+        if validate_email(&s) {
+            Ok(Self(s))
+        } else {
+            Err(format!("{} is not a valid email address", s))
+        }
+    }
+}
+
+impl AsRef<str> for SubscriberEmail {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::SubscriberEmail;
+    use claim::assert_err;
+    use fake::faker::internet::en::SafeEmail;
+    use fake::Fake;
+
+    #[test]
+    fn empty_string_is_rejected() {
+        let email = "".to_string();
+        assert_err!(SubscriberEmail::parse(email));
+    }
+
+    #[test]
+    fn email_missing_at_symbol_is_rejected() {
+        let email = "ursuladomain.com".to_string();
+        assert_err!(SubscriberEmail::parse(email));
+    }
+
+    #[test]
+    fn email_missing_subject_is_rejected() {
+        let email = "@domain.com".to_string();
+        assert_err!(SubscriberEmail::parse(email));
+    }
+
+    #[derive(Debug, Clone)]
+    struct ValidEmailFixture(pub String);
+
+    impl quickcheck::Arbitrary for ValidEmailFixture {
+        fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Self {
+            let email = SafeEmail().fake_with_rng(g);
+            Self(email)
+        }
+    }
+
+    #[quickcheck_macros::quickcheck]
+    fn valid_emails_are_parsed_successfully(valid_email: ValidEmailFixture) -> bool {
+        SubscriberEmail::parse(valid_email.0).is_ok()
+    }
+}
src/domain/subscriber_name.rs — new file (64 lines)
@@ -0,0 +1,64 @@
+use unicode_segmentation::UnicodeSegmentation;
+
+#[derive(Debug)]
+pub struct SubscriberName(String);
+
+impl SubscriberName {
+    pub fn parse(s: String) -> Result<SubscriberName, String> {
+        let is_empty_or_whitespace = s.trim().is_empty(); // Remove trailing whitespaces and check if containsd any characters
+        let is_too_long = s.graphemes(true).count() > 256;
+        let forbidden_characters = ['/', '(', ')', '"', '<', '>', '\\', '{', '}'];
+        let contains_forbidden_characters = s.chars().any(|g| forbidden_characters.contains(&g)); // Iterate to check if name contains any of the forbidden characters
+
+        // Return `false` if any conditions are violated
+        if is_empty_or_whitespace || is_too_long || contains_forbidden_characters {
+            Err(format!("{} is not a valid subscriber name", s))
+        } else {
+            Ok(Self(s))
+        }
+    }
+}
+
+impl AsRef<str> for SubscriberName {
+    fn as_ref(&self) -> &str {
+        &self.0
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::domain::SubscriberName;
+    use claim::{assert_err, assert_ok};
+
+    #[test]
+    fn a_name_longer_than_256_graphemes_is_rejected() {
+        let name = "a".repeat(257);
+        assert_err!(SubscriberName::parse(name));
+    }
+
+    #[test]
+    fn whitespace_only_names_are_rejected() {
+        let name = " ".to_string();
+        assert_err!(SubscriberName::parse(name));
+    }
+
+    #[test]
+    fn empty_string_is_rejected() {
+        let name = "".to_string();
+        assert_err!(SubscriberName::parse(name));
+    }
+
+    #[test]
+    fn names_containing_invalid_characters_are_rejected() {
+        for name in &['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
+            let name = name.to_string();
+            assert_err!(SubscriberName::parse(name));
+        }
+    }
+
+    #[test]
+    fn a_valid_name_is_parsed_successfully() {
+        let name = "Ursula Le Guin".to_string();
+        assert_ok!(SubscriberName::parse(name));
+    }
+}
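Both parsers above return `Result<_, String>`; a short usage sketch (the `demo` function is hypothetical, but the types and calls are exactly the ones added in this diff, and the `TryFrom<FormData>` impl later in the diff does the same thing):

    use mail_app::domain::{NewSubscriber, SubscriberEmail, SubscriberName};

    // Sketch: build a NewSubscriber from raw strings, failing with Err(String) on invalid input.
    fn demo() -> Result<NewSubscriber, String> {
        let name = SubscriberName::parse("Ursula Le Guin".to_string())?;
        let email = SubscriberEmail::parse("ursula_le_guin@gmail.com".to_string())?;
        Ok(NewSubscriber { email, name })
    }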
Modified file:
@@ -1,4 +1,6 @@
+#![allow(clippy::toplevel_ref_arg)]
 pub mod configuration;
 pub mod routes;
 pub mod startup;
 pub mod telemetry;
+pub mod domain;
src/main.rs — 30 lines changed
@@ -1,20 +1,26 @@
-use mail_app::configuration::get_configuration;
-use mail_app::startup::run;
-use mail_app::telemetry::{get_subscriber, init_subscriber};
-use sqlx::postgres::PgPoolOptions;
 use std::net::TcpListener;
+use sqlx::postgres::PgPoolOptions;
+
+use mail_app::startup::run;
+use mail_app::configuration::get_configuration;
+use mail_app::telemetry::{get_subscriber, init_subscriber};
 
 #[tokio::main]
-async fn main() -> Result<(), std::io::Error> {
+async fn main() -> std::io::Result<()> {
     let subscriber = get_subscriber("mail_app".into(), "info".into(), std::io::stdout);
     init_subscriber(subscriber);
 
-    let configuration = get_configuration().expect("Failed to read configuration");
-    let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db());
-    let address = format!(
-        "{}:{}",
-        configuration.application.host, configuration.application.port
-    );
+    // Attempt to read from config
+    let configuration = get_configuration().expect("Failed to read configuration data.");
+
+    // Configure connection to database for our startup
+    let connection_pool = PgPoolOptions::new()
+        .connect_timeout(std::time::Duration::from_secs(2))
+        .connect_lazy_with(configuration.database.with_db());
+
+    // Take port from settings file
+    let address = format!("{}:{}", configuration.application.host, configuration.application.port);
     let listener = TcpListener::bind(address)?;
-    run(listener, connection_pool)?.await
+    run(listener, connection_pool)?.await?;
+    Ok(())
 }
Modified file:
@@ -1,14 +1,27 @@
 use actix_web::{web, HttpResponse};
-use chrono::Utc;
 use sqlx::PgPool;
+use chrono::Utc;
 use uuid::Uuid;
+
+use crate::domain::{NewSubscriber, SubscriberName, SubscriberEmail};
 
 #[derive(serde::Deserialize)]
 pub struct FormData {
     email: String,
-    name: String,
+    name: String
 }
 
+impl TryFrom<FormData> for NewSubscriber {
+    type Error = String;
+
+    fn try_from(value: FormData) -> Result<Self, Self::Error> {
+        let name = SubscriberName::parse(value.name)?;
+        let email = SubscriberEmail::parse(value.email)?;
+        Ok(Self { email, name })
+    }
+}
+
+#[allow(clippy::async_yields_async)]
 #[tracing::instrument(
     name = "Adding a new subscriber",
     skip(form, pool),
@@ -17,9 +30,12 @@ pub struct FormData {
         subscriber_name = %form.name
     )
 )]
-pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>) -> HttpResponse {
-    match insert_subscriber(&pool, &form).await {
+pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>,) -> HttpResponse {
+    let new_subscriber = match form.0.try_into() {
+        Ok(form) => form,
+        Err(_) => return HttpResponse::BadRequest().finish(),
+    };
+    match insert_subscriber(&pool, &new_subscriber).await {
         Ok(_) => HttpResponse::Ok().finish(),
         Err(_) => HttpResponse::InternalServerError().finish(),
     }
@@ -27,18 +43,17 @@ pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>) -> Ht
 
 #[tracing::instrument(
     name = "Saving new subscriber details in the database",
-    skip(form, pool)
+    skip(new_subscriber, pool)
 )]
-pub async fn insert_subscriber(pool: &PgPool, form: &FormData) -> Result<(), sqlx::Error> {
+pub async fn insert_subscriber(pool: &PgPool, new_subscriber: &NewSubscriber) -> Result<(), sqlx::Error> {
     sqlx::query!(
         r#"
         INSERT INTO subscriptions (id, email, name, subscribed_at)
         VALUES ($1, $2, $3, $4)
         "#,
         Uuid::new_v4(),
-        form.email,
-        form.name,
+        new_subscriber.email.as_ref(),
+        new_subscriber.name.as_ref(),
         Utc::now()
     )
     .execute(pool)
Modified file:
@@ -1,10 +1,12 @@
-use crate::routes::{health_check, subscribe};
+use actix_web::{web, App, HttpServer};
 use actix_web::dev::Server;
-use actix_web::{web, web::Data, App, HttpServer};
-use sqlx::PgPool;
+use actix_web::web::Data;
 use std::net::TcpListener;
+use sqlx::PgPool;
 use tracing_actix_web::TracingLogger;
+
+use crate::routes::{health_check, subscribe};
 
 pub fn run(listener: TcpListener, db_pool: PgPool) -> Result<Server, std::io::Error> {
     let db_pool = Data::new(db_pool);
     let server = HttpServer::new(move || {
Modified file:
@@ -1,18 +1,18 @@
-use tracing::{subscriber::set_global_default, Subscriber};
+use tracing::{Subscriber, subscriber::set_global_default};
 use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
 use tracing_log::LogTracer;
-use tracing_subscriber::{fmt::MakeWriter, layer::SubscriberExt, EnvFilter, Registry};
+use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry, fmt::MakeWriter};
 
+/// Compose multiple layers into a tracing compatible subscriber
 pub fn get_subscriber<Sink>(
     name: String,
     env_filter: String,
     sink: Sink,
-) -> impl Subscriber + Send + Sync
+) -> impl Subscriber + Sync + Send
 where
     Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
 {
-    let env_filter =
-        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
+    let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
     let formatting_layer = BunyanFormattingLayer::new(name, sink);
     Registry::default()
         .with(env_filter)
@@ -20,7 +20,8 @@ where
         .with(formatting_layer)
 }
 
-pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
-    LogTracer::init().expect("Failed to set logger.");
-    set_global_default(subscriber).expect("Failed to set subscriber.");
+/// Register a subscriber as global default to process span data.
+pub fn init_subscriber(subscriber: impl Subscriber + Sync + Send) {
+    LogTracer::init().expect("Failed to set logger");
+    set_global_default(subscriber).expect("Failed to set subscriber");
 }
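For context, the two telemetry helpers above are wired up at startup exactly as the `src/main.rs` hunk earlier in this diff shows; a minimal sketch (the `init_tracing` wrapper is illustrative only, not part of the repository):

    use mail_app::telemetry::{get_subscriber, init_subscriber};

    // Sketch: build a bunyan-formatted subscriber writing to stdout at "info" level,
    // then install it (plus the log-to-tracing bridge) as the global default.
    fn init_tracing() {
        let subscriber = get_subscriber("mail_app".into(), "info".into(), std::io::stdout);
        init_subscriber(subscriber);
    }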
Modified file:
@@ -1,10 +1,16 @@
-use mail_app::configuration::{get_configuration, DatabaseSettings};
-use mail_app::startup::run;
-use mail_app::telemetry::{get_subscriber, init_subscriber};
-use once_cell::sync::Lazy;
-use sqlx::{Connection, Executor, PgConnection, PgPool};
 use std::net::TcpListener;
+use sqlx::{Connection, Executor, PgConnection, PgPool};
 use uuid::Uuid;
+use once_cell::sync::Lazy;
+
+use mail_app::startup::run;
+use mail_app::configuration::{get_configuration, DatabaseSettings};
+use mail_app::telemetry::{get_subscriber, init_subscriber};
+
+pub struct TestApp {
+    pub address: String,
+    pub db_pool: PgPool,
+}
 
 static TRACING: Lazy<()> = Lazy::new(|| {
     let default_filter_level = "info".to_string();
@@ -18,26 +24,23 @@ static TRACING: Lazy<()> = Lazy::new(|| {
     };
 });
 
-pub struct TestApp {
-    pub address: String,
-    pub db_pool: PgPool,
-}
-
+// Create new instance of the application on a random port and return address [`http://localhost:XXXX`]
 async fn spawn_app() -> TestApp {
     Lazy::force(&TRACING);
 
-    let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind to random port.");
+    let listener = TcpListener::bind("127.0.0.1:0")
+        .expect("Failed to bind to random port");
     let port = listener.local_addr().unwrap().port();
     let address = format!("http://127.0.0.1:{}", port);
 
-    let mut configuration = get_configuration().expect("Failed to read configuration");
-    configuration.database.database_name = Uuid::new_v4().to_string();
+    let mut configuration = get_configuration()
+        .expect("Failed to read configuration.");
+    configuration.database.database_name = Uuid::new_v4().to_string(); // Adjust database string to be random!
     let connection_pool = configure_database(&configuration.database).await;
 
-    let server = run(listener, connection_pool.clone()).expect("Failed to bind address");
-    // Launch in background
+    let server = run(listener, connection_pool.clone())
+        .expect("Failed to bind address");
     let _ = tokio::spawn(server);
 
     TestApp {
         address,
         db_pool: connection_pool,
@@ -45,25 +48,24 @@ async fn spawn_app() -> TestApp {
 }
 
 pub async fn configure_database(config: &DatabaseSettings) -> PgPool {
-    // Create Database
+    // Create database
     let mut connection = PgConnection::connect_with(&config.without_db())
         .await
-        .expect("Failed to connect to Postgres.");
+        .expect("Failed to connect to Postgres");
     connection
-        .execute(format!(r#"CREATE DATABASE "{}";"#, config.database_name).as_str())
+        .execute(&*format!(r#"CREATE DATABASE "{}";"#, config.database_name))
         .await
         .expect("Failed to create database.");
 
-    // Migrate Database
+    // Migrate database
     let connection_pool = PgPool::connect_with(config.with_db())
         .await
         .expect("Failed to connect to Postgres.");
     sqlx::migrate!("./migrations")
         .run(&connection_pool)
         .await
-        .expect("Failed to migrate the database.");
+        .expect("Failed to migrate the database");
 
-    // Return connection pool
     connection_pool
 }
 
@@ -78,21 +80,21 @@ async fn health_check_works() {
         .get(&format!("{}/health_check", &app.address))
         .send()
         .await
-        .expect("Failed to execute request");
+        .expect("Failed to execute request.");
 
-    // Assert
+    // Assert our test
     assert!(response.status().is_success());
     assert_eq!(Some(0), response.content_length());
 }
 
 #[tokio::test]
-async fn subscribe_returns_a_200_for_valid_form_data() {
+async fn subscribe_returns_200_for_valid_form_data() {
     // Arrange
     let app = spawn_app().await;
     let client = reqwest::Client::new();
+    let body = "name=le%20guin&email=ursula_le_guin%40gmail.com";
 
     // Act
-    let body = "name=le%20guin&email=ursula_le_guin%40gmail.com";
     let response = client
         .post(&format!("{}/subscriptions", &app.address))
         .header("Content-Type", "application/x-www-form-urlencoded")
@@ -101,7 +103,7 @@ async fn subscribe_returns_a_200_for_valid_form_data() {
         .await
         .expect("Failed to execute request.");
 
-    // Assert
+    // Assert test
     assert_eq!(200, response.status().as_u16());
 
     let saved = sqlx::query!("SELECT email, name FROM subscriptions",)
@@ -114,14 +116,14 @@ async fn subscribe_returns_a_200_for_valid_form_data() {
 }
 
 #[tokio::test]
-async fn subscribe_returns_a_400_when_data_is_missing() {
-    // Arrange
+async fn subscribe_returns_400_for_missing_form_data() {
+    //Arrange
     let app = spawn_app().await;
     let client = reqwest::Client::new();
     let test_cases = vec![
-        ("name=le%20guin", "missing email"),
-        ("email=ursula_le_guin%40gmail.com", "missing name"),
-        ("", "missing both name and email"),
+        ("name=le%20guin", "missing the email"),
+        ("email=ursula_le_guin%40gmail.com", "missing the name"),
+        ("", "missing both name and email")
     ];
 
     for (invalid_body, error_message) in test_cases {
@@ -138,8 +140,37 @@ async fn subscribe_returns_a_400_when_data_is_missing() {
         assert_eq!(
             400,
             response.status().as_u16(),
-            "The API id not fail wth 400 Bad Request when the payload was {}.",
-            error_message
+            // Customised error message on test failure
+            "The API did not fail with 400 Bad Request when the payload was {}.", error_message
+        );
+    }
+}
+
+#[tokio::test]
+async fn subscribe_returns_a_400_when_fields_are_present_but_empty() {
+    // Arrange
+    let app = spawn_app().await;
+    let client = reqwest::Client::new();
+    let test_cases = vec![
+        ("name=&email=ursula_le_guin%40gmail.com", "empty name"),
+        ("name=Ursula&email=", "empty email"),
+        ("name=Ursula&email=definitely-not-an-email", "invalid email"),
+    ];
+
+    for (body, description) in test_cases {
+        // Act
+        let response = client
+            .post(&format!("{}/subscriptions", &app.address))
+            .header("Content-Type", "application/x-www-form-urlencoded")
+            .body(body)
+            .send()
+            .await
+            .expect("Failed to execute request.");
+
+        // Assert
+        assert_eq!(400,
+            response.status().as_u16(),
+            "The API did not return a 400 Bad Request when the payload was {}.", description
         );
     }
 }