Compare commits

...

No commits in common. "master" and "2023-02-19" have entirely different histories.

33 changed files with 1748 additions and 1675 deletions

View File

@ -1,8 +0,0 @@
target/
.vscode
tests/
.git
.gitignore
.env
Dockerfile
migrations/

25
.cargo/config.toml Normal file
View File

@ -0,0 +1,25 @@
# On Windows
# ```
# cargo install -f cargo-binutils
# rustup component add llvm-tools-preview
# ```
[target.x86_64-pc-windows-msvc]
rustflags = ["-C", "link-arg=-fuse-ld=lld"]
[target.x86_64-pc-windows-gnu]
rustflags = ["-C", "link-arg=-fuse-ld=lld"]
# On Linux:
# - Ubuntu, `sudo apt-get install lld clang`
# - Arch, `sudo pacman -S lld clang`
[target.x86_64-unknown-linux-gnu]
rustflags = ["-C", "linker=clang", "-C", "link-arg=-fuse-ld=lld"]
# On MacOS, `brew install michaeleisel/zld/zld`
# [target.x86_64-apple-darwin]
# rustflags = ["-C", "link-arg=-fuse-ld=/usr/local/bin/zld"]
[target.aarch64-apple-darwin]
rustflags = [
"-C",
"link-arg=-fuse-ld=/opt/homebrew/Cellar/llvm/17.0.6_1/bin/ld64.lld",
]

6
.dockerignore Normal file
View File

@ -0,0 +1,6 @@
.env
target/
tests/
Dockerfile
scripts/
migrations/

View File

@ -1,48 +0,0 @@
kind: pipeline
type: docker
name: mailApp
trigger:
branch:
- master
event:
- push
steps:
- name: postgresDBTest # Test that the service is ready to be acted upon for cargo tests
image: postgres:12
environment:
PGPASSWORD: password
DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
commands:
- sleep 35
- "psql -U postgres -d newsletter -h postgres"
- name: sqlxMigrate
image: rust:1.57
environment:
DATABASE_URL: postgres://postgres:password@postgres:5432/newsletter
SKIP_DOCKER:
from_secret: SKIP_DOCKER
commands:
- apt update && apt install -y build-essential pkg-config libssl-dev # Dependancies for sqlx
- cargo install --version=0.5.7 sqlx-cli --no-default-features --features postgres # Install sqlx
- sqlx database create
- sqlx migrate run
- name: test
image: rust:1.57
environment:
APP_ENVIRONMENT: drone
commands:
- apt update && apt install -y build-essential pkg-config libssl-dev # Dependancies for tarpaulin
- cargo install cargo-tarpaulin
- cargo tarpaulin -v --all-features --timeout 120 --color always # RUN THOSE TESTS
services:
- name: postgres
image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_DB: newsletter

10
.gitignore vendored
View File

@ -1,3 +1,11 @@
/target /target
.vscode .vscode
.env #.env
.gitlab-ci-local
.DS_Store
# Added by cargo
#
# already existing elements were commented out
#/target

1
.gitlab-ci-local-env Normal file
View File

@ -0,0 +1 @@
PRIVILEGED=true

View File

@ -1,30 +1,57 @@
image: "rust:latest" image: "rust:latest"
services:
- postgres:latest
variables:
POSTGRES_DB: newsletter
POSTGRES_USER: postgres
POSTGRES_PASSWORD: password
POSTGRES_HOST: postgres
DB_PORT: 5432
DATABASE_URL: "postgres://$POSTGRES_USER:$POSTGRES_PASSWORD@$POSTGRES_HOST:$DB_PORT/$POSTGRES_DB"
APP_DATABASE__HOST: $POSTGRES_HOST
cache: # Caches build artifacts so we don't build from scratch in both build and test
key: ${CI_COMMIT_REF_SLUG}
paths:
- .cargo/bin
- .cargo/registry/index
- .cargo/registry/cache
- target/debug/deps
- target/debug/build
policy: pull-push
default: default:
before_script: before_script:
- rustc --version - export CARGO_HOME="$CI_PROJECT_DIR/.cargo"
- export PATH="$CARGO_HOME/bin:$PATH"
- rustc --version
- cargo --version - cargo --version
- apt update -yq && apt-get install -yq postgresql-client lld clang
- if ! [ -x "$(command -v cargo-sqlx)" ]; then cargo install --version='~0.7' sqlx-cli --no-default-features --features rustls,postgres; fi
- SKIP_DOCKER=true ./scripts/init_db.sh
# This is to ensure that the database is reachable and give it some time to initialize.
- until psql "dbname=$POSTGRES_DB user=$POSTGRES_USER password=$POSTGRES_PASSWORD host=postgres" -c '\l'; do sleep 3; done
stages: stages:
- build
- test - test
build:
stage: build
script:
- cargo build
test-code: test-code:
stage: test stage: test
script: script:
- cargo test - cargo test
- cargo install cargo-tarpaulin - if ! [ -x "$(command -v cargo-tarpaulin)" ]; then cargo install cargo-tarpaulin; fi
- cargo tarpaulin --ignore-tests - cargo tarpaulin --ignore-tests
lint-code: lint-code:
stage: test stage: test
script: script:
- rustup component add rustfmt
- cargo fmt -- --check
- rustup component add clippy - rustup component add clippy
- cargo clippy -- -D warnings - cargo clippy -- -D warnings
audit-code:
stage: test
script:
- cargo install cargo-audit
- cargo audit

View File

@ -0,0 +1,17 @@
{
"db_name": "PostgreSQL",
"query": "\n INSERT INTO subscriptions (id, email, name, subscribed_at)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text",
"Text",
"Timestamptz"
]
},
"nullable": []
},
"hash": "bcfcfebc6f5e8ffbf97d97c5a209be78b46d703924482cf8b43842705fcb7714"
}

2324
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@ -1,7 +1,6 @@
[package] [package]
name = "mail_app" name = "mail_app"
version = "0.1.0" version = "0.1.0"
authors = ["NickBland <nick.bland@nickbland.dev>"]
edition = "2021" edition = "2021"
[lib] [lib]
@ -12,27 +11,32 @@ path = "src/main.rs"
name = "mail_app" name = "mail_app"
[dependencies] [dependencies]
actix-web = "=4.0.0-beta.16" actix-web = "4"
tokio = { version = "1", features = ["macros", "rt-multi-thread"] } tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
serde = "1.0.115" serde = { version = "1", features = ["derive"] }
config = { version = "0.11", default-features = false, features = ["yaml"] } serde-aux = "4"
sqlx = { version = "0.5.5", default-features = false, features = [ "runtime-actix-rustls", "macros", "postgres", "uuid", "chrono", "migrate", "offline"] } config = { version = "0.13", default-features = false, features = ["yaml"] }
uuid = { version = "0.8.1", features = ["v4"] } uuid = { version = "1", features = ["v4"] }
chrono = "0.4.15" chrono = { version = "0.4.22", default-features = false, features = ["clock"] }
tracing = "0.1.19" tracing = { version = "0.1", features = ["log"] }
tracing-subscriber = { version = "0.3", features = ["registry", "env-filter"] } tracing-subscriber = { version = "0.3", features = ["registry", "env-filter"] }
tracing-bunyan-formatter = "0.3.1" tracing-bunyan-formatter = "0.3"
tracing-log = "0.1.1" tracing-actix-web = "0.7"
serde-aux = "3" tracing-log = "0.1"
tracing-actix-web = "0.5.0-beta.7"
secrecy = { version = "0.8", features = ["serde"] } secrecy = { version = "0.8", features = ["serde"] }
unicode-segmentation = "1"
validator = "0.14" [dependencies.sqlx]
version = "0.7"
default-features = false
features = [
"runtime-tokio-rustls",
"macros",
"postgres",
"uuid",
"chrono",
"migrate",
]
[dev-dependencies] [dev-dependencies]
reqwest = { version = "0.11", features = ["json"] } reqwest = { version = "0.11", features = ["json"] }
once_cell = "1.7.2" once_cell = "1"
claim = "0.5"
fake = "~2.3"
quickcheck = "0.9.2"
quickcheck_macros = "0.9.1"

View File

@ -1,30 +1,32 @@
FROM lukemathwalker/cargo-chef:latest-rust-1.57.0 AS chef ##### Chef
FROM lukemathwalker/cargo-chef:latest-rust-1.76.0 as chef
WORKDIR /app WORKDIR /app
RUN apt update && apt install lld clang -y
# Create lock file for project to be used in builder ##### Planner
FROM chef AS planner FROM chef as planner
COPY . . COPY . .
RUN cargo chef prepare --recipe-path recipe.json RUN cargo chef prepare --recipe-path recipe.json
# Build dependancies ##### Builder
# Builder prepares project dependancies, not the application.
FROM chef as builder FROM chef as builder
COPY --from=planner /app/recipe.json recipe.json COPY --from=planner /app/recipe.json recipe.json
RUN cargo chef cook --release --recipe-path recipe.json RUN cargo chef cook --release --recipe-path recipe.json
COPY . . COPY . .
ENV SQLX_OFFLINE true ENV SQLX_OFFLINE true
RUN cargo build --release # Now build the application itself.
RUN cargo build --release --bin mail_app
FROM debian:bullseye-slim AS runtime ##### Runtime
FROM debian:bookworm-slim as runtime
WORKDIR /app WORKDIR /app
RUN apt update && apt install -y --no-install-recommends openssl ca-certificates \
# Install dependancies required && apt autoremove -y \
RUN apt update -y && apt install -y --no-install-recommends openssl && apt autoremove -y && apt clean -y && rm -rf /var/lib/apt/lists/* && apt clean -y \
&& rm -rf /var/lib/apt/lists/*
# Copy the fully built binary and configuration to the image
COPY --from=builder /app/target/release/mail_app mail_app COPY --from=builder /app/target/release/mail_app mail_app
COPY configuration configuration COPY configuration configuration
ENV APP_ENVIRONMENT production ENV APP_ENVIRONMENT production
ENTRYPOINT ["./mail_app"]
ENTRYPOINT ["./mail_app"]

View File

@ -5,4 +5,4 @@ database:
port: 5432 port: 5432
username: "postgres" username: "postgres"
password: "password" password: "password"
database_name: "newsletter" database_name: "newsletter"

View File

@ -1,5 +0,0 @@
application:
host: 0.0.0.0
database:
host: "postgres"
require_ssl: false

View File

@ -1,4 +1,4 @@
application: application:
host: 127.0.0.1 host: 127.0.0.1
database: database:
require_ssl: false require_ssl: false

View File

@ -1,4 +1,4 @@
application: application:
host: 0.0.0.0 host: 0.0.0.0
database: database:
require_ssl: true require_ssl: true

View File

@ -1,5 +1,6 @@
-- migrations/{timestamp}_create_subscriptions_table.sql
-- Create Subscriptions Table -- Create Subscriptions Table
CREATE TABLE Subscriptions( CREATE TABLE subscriptions(
id uuid NOT NULL, id uuid NOT NULL,
PRIMARY KEY (id), PRIMARY KEY (id),
email TEXT NOT NULL UNIQUE, email TEXT NOT NULL UNIQUE,

53
scripts/init_db.sh Normal file → Executable file
View File

@ -3,50 +3,49 @@ set -x
set -eo pipefail set -eo pipefail
if ! [ -x "$(command -v psql)" ]; then if ! [ -x "$(command -v psql)" ]; then
echo >&2 "Error: `psql` is not installed." echo >&2 "Error: psql is not installed."
echo >&2 "Use:" exit 1
echo >&2 " sudo apt update && sudo apt install postgresql-client"
echo >&2 "to install it."
exit 1
fi fi
if ! [ -x "$(command -v sqlx)" ]; then if ! [ -x "$(command -v sqlx)" ]; then
echo >&2 "Error: `sqlx` is not installed." echo >&2 "Error: sqlx is not installed."
echo >&2 "Use:" echo >&2 "Use:"
echo >&2 " sudo apt install build-essential pkg-config libssl-dev" echo >&2 " cargo install --version="~0.6" sqlx-cli"
echo >&2 " cargo install --version=0.5.7 sqlx-cli --no-default-features --features postgres" echo >&2 " --no-default-features --feature rustls,postgres"
echo >&2 "to install it." echo >&2 "to install."
exit 1 exit 1
fi fi
DB_USER=${POSTGRES_USER:=postgres} DB_USER="${POSTGRES_USER:=postgres}"
DB_PASSWORD="${POSTGRES_PASSWORD:=password}" DB_PASSWORD="${POSTGRES_PASSWORD:=password}"
DB_NAME="${POSTGRES_DB:=newsletter}" DB_NAME="${POSTGRES_DB:=newsletter}"
DB_PORT="${POSTGRES_PORT:=5432}" DB_PORT="${POSTGRES_PORT:=5432}"
DB_HOST="${POSTGRES_HOST:=localhost}"
# Allow to skip Docker installation if Postgres is already running
if [[ -z "${SKIP_DOCKER}" ]] if [[ -z "${SKIP_DOCKER}" ]]
then then
docker run \ docker run \
--name postgres-db \ -e POSTGRES_USER=${DB_USER} \
-e POSTGRES_USER=${DB_USER} \ -e POSTGRES_PASSWORD=${DB_PASSWORD} \
-e POSTGRES_PASSWORD=${DB_PASSWORD} \ -e POSTGRES_DB=${DB_NAME} \
-e POSTGRES_DB=${DB_NAME} \ -p "${DB_PORT}":5432 \
-p "${DB_PORT}":5432 \ --name "mailAppDB" \
-d postgres \ -d postgres:alpine \
postgres -N 1000 postgres -N 1000
fi fi
# Ping until ready to accept commands
export PGPASSWORD="${DB_PASSWORD}" export PGPASSWORD="${DB_PASSWORD}"
until psql -h "localhost" -U "${DB_USER}" -p "${DB_PORT}" -d "postgres" -c '\q'; do until psql -h "${DB_HOST}" -U "${DB_USER}" -p "${DB_PORT}" -d "postgres" -c '\q'; do
>&2 echo "Postgres is still unavailable - sleeping" >&2 echo "Postgres is still unavailable - sleeping"
sleep 1 sleep 1
done done
>&2 echo "Postgres is up and running on port ${DB_PORT} - running migrations." >&2 echo "Postgres is running on port ${DB_PORT}, and ready to accept commands!"
export DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@localhost:${DB_PORT}/${DB_NAME} DATABASE_URL=postgres://${DB_USER}:${DB_PASSWORD}@${DB_HOST}:${DB_PORT}/${DB_NAME}
export DATABASE_URL
sqlx database create sqlx database create
sqlx migrate run sqlx migrate run
>&2 echo "Postgres has been migrated. Jobs Complete." >&2 echo "Postgres has been migrated, ready for queries!"

14
scripts/remove_test_dbs.sh Executable file
View File

@ -0,0 +1,14 @@
#!/usr/bin/env bash
set -x
set -eo pipefail
DB_USER="${POSTGRES_USER:=postgres}"
DB_PASSWORD="${POSTGRES_PASSWORD:=password}"
DB_NAME="${POSTGRES_DB:=newsletter}"
DB_PORT="${POSTGRES_PORT:=5432}"
DB_HOST="${POSTGRES_HOST:=localhost}"
for dbname in $(psql -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" -d "$DB_NAME" -c "copy (select datname from pg_database where datname like '%-%-%-%-%') to stdout") ; do
echo "dropping database $dbname"
dropdb -h "$DB_HOST" -p "$DB_PORT" -U "$DB_USER" "$dbname"
done

View File

@ -1,41 +0,0 @@
#! spec.yaml
name: rust-project
region: sgp1
services:
- name: rust-project
dockerfile_path: Dockerfile
source_dir: .
github:
repo: NickBland/mailApp
branch: master
deploy_on_push: true
health_check:
http_path: /health_check
http_port: 8000
instance_count: 1
instance_size_slug: basic-xxs
routes:
- path: /
envs:
- key: APP_DATABASE__USERNAME
scope: RUN_TIME
value: ${newsletter.USERNAME}
- key: APP_DATABASE__PASSWORD
scope: RUN_TIME
value: ${newsletter.PASSWORD}
- key: APP_DATABASE__HOST
scope: RUN_TIME
value: ${newsletter.HOSTNAME}
- key: APP_DATABASE__PORT
scope: RUN_TIME
value: ${newsletter.PORT}
- key: APP_DATABASE__DATABASE_NAME
scope: RUN_TIME
value: ${newsletter.DATABASE}
databases:
- engine: PG
name: newsletter
num_nodes: 1
size: deb-s-dev-database
version: "12"

View File

@ -1,18 +0,0 @@
{
"db": "PostgreSQL",
"793f0df728d217c204123f12e4eafd6439db2d49d0cb506618ae9e780c7e0558": {
"query": "\n INSERT INTO subscriptions (id, email, name, subscribed_at)\n VALUES ($1, $2, $3, $4)\n ",
"describe": {
"columns": [],
"parameters": {
"Left": [
"Uuid",
"Text",
"Text",
"Timestamptz"
]
},
"nullable": []
}
}
}

View File

@ -1,8 +1,6 @@
use std::convert::{TryFrom, TryInto}; use secrecy::{ExposeSecret, Secret};
use serde_aux::field_attributes::deserialize_number_from_string; use serde_aux::field_attributes::deserialize_number_from_string;
use sqlx::postgres::{PgConnectOptions, PgSslMode}; use sqlx::postgres::{PgConnectOptions, PgSslMode};
use sqlx::ConnectOptions;
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
pub struct Settings { pub struct Settings {
@ -10,6 +8,17 @@ pub struct Settings {
pub application: ApplicationSettings, pub application: ApplicationSettings,
} }
#[derive(serde::Deserialize)]
pub struct DatabaseSettings {
pub username: String,
pub password: Secret<String>,
#[serde(deserialize_with = "deserialize_number_from_string")]
pub port: u16,
pub host: String,
pub database_name: String,
pub require_ssl: bool,
}
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
pub struct ApplicationSettings { pub struct ApplicationSettings {
#[serde(deserialize_with = "deserialize_number_from_string")] #[serde(deserialize_with = "deserialize_number_from_string")]
@ -17,15 +26,62 @@ pub struct ApplicationSettings {
pub host: String, pub host: String,
} }
#[derive(serde::Deserialize)] pub enum Environment {
pub struct DatabaseSettings { Local,
pub username: String, Production,
pub password: String, }
#[serde(deserialize_with = "deserialize_number_from_string")]
pub port: u16, impl Environment {
pub host: String, pub fn as_str(&self) -> &'static str {
pub database_name: String, match self {
pub require_ssl: bool, Environment::Local => "local",
Environment::Production => "production",
}
}
}
impl TryFrom<String> for Environment {
type Error = String;
fn try_from(s: String) -> Result<Self, Self::Error> {
match s.to_lowercase().as_str() {
"local" => Ok(Environment::Local),
"production" => Ok(Environment::Production),
e => Err(format!(
"{} is not a supported environment. Use `local` or `production`",
e
)),
}
}
}
pub fn get_configuration() -> Result<Settings, config::ConfigError> {
let base_path = std::env::current_dir().expect("Failed to determine the current directory");
let configuration_directory = base_path.join("configuration");
// Detect current environment, default to LOCAL
let environment: Environment = std::env::var("APP_ENVIRONMENT")
.unwrap_or_else(|_| "local".into())
.try_into()
.expect("Failed to parse APP_ENVIRONMENT");
let environment_filename = format!("{}.yaml", environment.as_str());
// initialise config reader
let settings = config::Config::builder()
.add_source(config::File::from(
configuration_directory.join("base.yaml"),
))
.add_source(config::File::from(
configuration_directory.join(environment_filename),
))
.add_source(
config::Environment::with_prefix("APP")
.prefix_separator("_")
.separator("__"),
)
.build()?;
settings.try_deserialize::<Settings>()
} }
impl DatabaseSettings { impl DatabaseSettings {
@ -36,71 +92,14 @@ impl DatabaseSettings {
PgSslMode::Prefer PgSslMode::Prefer
}; };
PgConnectOptions::new() PgConnectOptions::new()
.host(&self.host)
.username(&self.username) .username(&self.username)
.password(&self.password) .password(self.password.expose_secret())
.host(&self.host)
.port(self.port) .port(self.port)
.ssl_mode(ssl_mode) .ssl_mode(ssl_mode)
} }
pub fn with_db(&self) -> PgConnectOptions { pub fn with_db(&self) -> PgConnectOptions {
let mut options = self.without_db().database(&self.database_name); self.without_db().database(&self.database_name)
options.log_statements(tracing::log::LevelFilter::Trace);
options
} }
} }
pub fn get_configuration() -> Result<Settings, config::ConfigError> {
// Initialise configuration reader
let mut settings = config::Config::default();
let base_path = std::env::current_dir().expect("Failed to determine the current directory");
let configuration_directory = base_path.join("configuration");
// Read default config file
settings.merge(config::File::from(configuration_directory.join("base")).required(true))?;
let environment: Environment = std::env::var("APP_ENVIRONMENT")
.unwrap_or_else(|_| "local".into())
.try_into()
.expect("Failed to parse APP_ENVIRONMENT.");
settings.merge(
config::File::from(configuration_directory.join(environment.as_str())).required(true),
)?;
settings.merge(config::Environment::with_prefix("app").separator("__"))?;
// Try convert into Settings type
settings.try_into()
}
pub enum Environment {
Local,
Production,
Drone
}
impl Environment {
pub fn as_str(&self) -> &'static str {
match self {
Environment::Local => "local",
Environment::Production => "production",
Environment::Drone => "drone",
}
}
}
impl TryFrom<String> for Environment {
type Error = String;
fn try_from(s: String) -> Result<Self, Self::Error> {
match s.to_lowercase().as_str() {
"local" => Ok(Self::Local),
"production" => Ok(Self::Production),
"drone" => Ok(Self::Drone),
other => Err(format!(
"{} is nto a supported environment. Use either `local`, `production` or `drone`.", other
)),
}
}
}

View File

@ -1,7 +0,0 @@
mod subscriber_name;
mod subscriber_email;
mod new_subscriber;
pub use subscriber_name::SubscriberName;
pub use new_subscriber::NewSubscriber;
pub use subscriber_email::SubscriberEmail;

View File

@ -1,7 +0,0 @@
use crate::domain::SubscriberName;
use crate::domain::SubscriberEmail;
pub struct NewSubscriber {
pub email: SubscriberEmail,
pub name: SubscriberName,
}

View File

@ -1,61 +0,0 @@
use validator::validate_email;
#[derive(Debug)]
pub struct SubscriberEmail(String);
impl SubscriberEmail {
pub fn parse(s: String) -> Result<SubscriberEmail, String> {
if validate_email(&s) {
Ok(Self(s))
} else {
Err(format!("{} is not a valid email address", s))
}
}
}
impl AsRef<str> for SubscriberEmail {
fn as_ref(&self) -> &str {
&self.0
}
}
#[cfg(test)]
mod tests {
use super::SubscriberEmail;
use claim::assert_err;
use fake::faker::internet::en::SafeEmail;
use fake::Fake;
#[test]
fn empty_string_is_rejected() {
let email = "".to_string();
assert_err!(SubscriberEmail::parse(email));
}
#[test]
fn email_missing_at_symbol_is_rejected() {
let email = "ursuladomain.com".to_string();
assert_err!(SubscriberEmail::parse(email));
}
#[test]
fn email_missing_subject_is_rejected() {
let email = "@domain.com".to_string();
assert_err!(SubscriberEmail::parse(email));
}
#[derive(Debug, Clone)]
struct ValidEmailFixture(pub String);
impl quickcheck::Arbitrary for ValidEmailFixture {
fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Self {
let email = SafeEmail().fake_with_rng(g);
Self(email)
}
}
#[quickcheck_macros::quickcheck]
fn valid_emails_are_parsed_successfully(valid_email: ValidEmailFixture) -> bool {
SubscriberEmail::parse(valid_email.0).is_ok()
}
}

View File

@ -1,64 +0,0 @@
use unicode_segmentation::UnicodeSegmentation;
#[derive(Debug)]
pub struct SubscriberName(String);
impl SubscriberName {
pub fn parse(s: String) -> Result<SubscriberName, String> {
let is_empty_or_whitespace = s.trim().is_empty(); // Remove trailing whitespaces and check if containsd any characters
let is_too_long = s.graphemes(true).count() > 256;
let forbidden_characters = ['/', '(', ')', '"', '<', '>', '\\', '{', '}'];
let contains_forbidden_characters = s.chars().any(|g| forbidden_characters.contains(&g)); // Iterate to check if name contains any of the forbidden characters
// Return `false` if any conditions are violated
if is_empty_or_whitespace || is_too_long || contains_forbidden_characters {
Err(format!("{} is not a valid subscriber name", s))
} else {
Ok(Self(s))
}
}
}
impl AsRef<str> for SubscriberName {
fn as_ref(&self) -> &str {
&self.0
}
}
#[cfg(test)]
mod tests {
use crate::domain::SubscriberName;
use claim::{assert_err, assert_ok};
#[test]
fn a_name_longer_than_256_graphemes_is_rejected() {
let name = "a".repeat(257);
assert_err!(SubscriberName::parse(name));
}
#[test]
fn whitespace_only_names_are_rejected() {
let name = " ".to_string();
assert_err!(SubscriberName::parse(name));
}
#[test]
fn empty_string_is_rejected() {
let name = "".to_string();
assert_err!(SubscriberName::parse(name));
}
#[test]
fn names_containing_invalid_characters_are_rejected() {
for name in &['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
let name = name.to_string();
assert_err!(SubscriberName::parse(name));
}
}
#[test]
fn a_valid_name_is_parsed_successfully() {
let name = "Ursula Le Guin".to_string();
assert_ok!(SubscriberName::parse(name));
}
}

View File

@ -1,6 +1,4 @@
#![allow(clippy::toplevel_ref_arg)] pub mod configuration;
pub mod configuration; pub mod routes;
pub mod routes; pub mod startup;
pub mod startup; pub mod telemetry;
pub mod telemetry;
pub mod domain;

View File

@ -1,26 +1,20 @@
use std::net::TcpListener;
use sqlx::postgres::PgPoolOptions;
use mail_app::startup::run;
use mail_app::configuration::get_configuration; use mail_app::configuration::get_configuration;
use mail_app::startup::run;
use mail_app::telemetry::{get_subscriber, init_subscriber}; use mail_app::telemetry::{get_subscriber, init_subscriber};
use sqlx::postgres::PgPoolOptions;
use std::net::TcpListener;
#[tokio::main] #[tokio::main]
async fn main() -> std::io::Result<()> { async fn main() -> Result<(), std::io::Error> {
let subscriber = get_subscriber("mail_app".into(), "info".into(), std::io::stdout); let subscriber = get_subscriber("mail_app".into(), "info".into(), std::io::stdout);
init_subscriber(subscriber); init_subscriber(subscriber);
// Attempt to read from config let configuration = get_configuration().expect("Failed to read configuration");
let configuration = get_configuration().expect("Failed to read configuration data."); let connection_pool = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db());
let address = format!(
// Configure connection to database for our startup "{}:{}",
let connection_pool = PgPoolOptions::new() configuration.application.host, configuration.application.port
.connect_timeout(std::time::Duration::from_secs(2)) );
.connect_lazy_with(configuration.database.with_db());
// Take port from settings file
let address = format!("{}:{}", configuration.application.host, configuration.application.port);
let listener = TcpListener::bind(address)?; let listener = TcpListener::bind(address)?;
run(listener, connection_pool)?.await?; run(listener, connection_pool)?.await
Ok(()) }
}

View File

@ -2,4 +2,4 @@ use actix_web::HttpResponse;
pub async fn health_check() -> HttpResponse { pub async fn health_check() -> HttpResponse {
HttpResponse::Ok().finish() HttpResponse::Ok().finish()
} }

View File

@ -2,4 +2,4 @@ mod health_check;
mod subscriptions; mod subscriptions;
pub use health_check::*; pub use health_check::*;
pub use subscriptions::*; pub use subscriptions::*;

View File

@ -1,27 +1,14 @@
use actix_web::{web, HttpResponse}; use actix_web::{web, HttpResponse};
use sqlx::PgPool;
use chrono::Utc; use chrono::Utc;
use sqlx::PgPool;
use uuid::Uuid; use uuid::Uuid;
use crate::domain::{NewSubscriber, SubscriberName, SubscriberEmail};
#[derive(serde::Deserialize)] #[derive(serde::Deserialize)]
pub struct FormData { pub struct FormData {
email: String, email: String,
name: String name: String,
} }
impl TryFrom<FormData> for NewSubscriber {
type Error = String;
fn try_from(value: FormData) -> Result<Self, Self::Error> {
let name = SubscriberName::parse(value.name)?;
let email = SubscriberEmail::parse(value.email)?;
Ok(Self { email, name })
}
}
#[allow(clippy::async_yields_async)]
#[tracing::instrument( #[tracing::instrument(
name = "Adding a new subscriber", name = "Adding a new subscriber",
skip(form, pool), skip(form, pool),
@ -30,12 +17,9 @@ impl TryFrom<FormData> for NewSubscriber {
subscriber_name = %form.name subscriber_name = %form.name
) )
)] )]
pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>,) -> HttpResponse {
let new_subscriber = match form.0.try_into() { pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>) -> HttpResponse {
Ok(form) => form, match insert_subscriber(&pool, &form).await {
Err(_) => return HttpResponse::BadRequest().finish(),
};
match insert_subscriber(&pool, &new_subscriber).await {
Ok(_) => HttpResponse::Ok().finish(), Ok(_) => HttpResponse::Ok().finish(),
Err(_) => HttpResponse::InternalServerError().finish(), Err(_) => HttpResponse::InternalServerError().finish(),
} }
@ -43,17 +27,18 @@ pub async fn subscribe(form: web::Form<FormData>, pool: web::Data<PgPool>,) -> H
#[tracing::instrument( #[tracing::instrument(
name = "Saving new subscriber details in the database", name = "Saving new subscriber details in the database",
skip(new_subscriber, pool) skip(form, pool)
)] )]
pub async fn insert_subscriber(pool: &PgPool, new_subscriber: &NewSubscriber) -> Result<(), sqlx::Error> {
pub async fn insert_subscriber(pool: &PgPool, form: &FormData) -> Result<(), sqlx::Error> {
sqlx::query!( sqlx::query!(
r#" r#"
INSERT INTO subscriptions (id, email, name, subscribed_at) INSERT INTO subscriptions (id, email, name, subscribed_at)
VALUES ($1, $2, $3, $4) VALUES ($1, $2, $3, $4)
"#, "#,
Uuid::new_v4(), Uuid::new_v4(),
new_subscriber.email.as_ref(), form.email,
new_subscriber.name.as_ref(), form.name,
Utc::now() Utc::now()
) )
.execute(pool) .execute(pool)
@ -63,4 +48,4 @@ pub async fn insert_subscriber(pool: &PgPool, new_subscriber: &NewSubscriber) ->
e e
})?; })?;
Ok(()) Ok(())
} }

View File

@ -1,11 +1,9 @@
use actix_web::{web, App, HttpServer};
use actix_web::dev::Server;
use actix_web::web::Data;
use std::net::TcpListener;
use sqlx::PgPool;
use tracing_actix_web::TracingLogger;
use crate::routes::{health_check, subscribe}; use crate::routes::{health_check, subscribe};
use actix_web::dev::Server;
use actix_web::{web, web::Data, App, HttpServer};
use sqlx::PgPool;
use std::net::TcpListener;
use tracing_actix_web::TracingLogger;
pub fn run(listener: TcpListener, db_pool: PgPool) -> Result<Server, std::io::Error> { pub fn run(listener: TcpListener, db_pool: PgPool) -> Result<Server, std::io::Error> {
let db_pool = Data::new(db_pool); let db_pool = Data::new(db_pool);
@ -19,4 +17,4 @@ pub fn run(listener: TcpListener, db_pool: PgPool) -> Result<Server, std::io::Er
.listen(listener)? .listen(listener)?
.run(); .run();
Ok(server) Ok(server)
} }

View File

@ -1,18 +1,18 @@
use tracing::{Subscriber, subscriber::set_global_default}; use tracing::{subscriber::set_global_default, Subscriber};
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer}; use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer; use tracing_log::LogTracer;
use tracing_subscriber::{layer::SubscriberExt, EnvFilter, Registry, fmt::MakeWriter}; use tracing_subscriber::{fmt::MakeWriter, layer::SubscriberExt, EnvFilter, Registry};
/// Compose multiple layers into a tracing compatible subscriber
pub fn get_subscriber<Sink>( pub fn get_subscriber<Sink>(
name: String, name: String,
env_filter: String, env_filter: String,
sink: Sink, sink: Sink,
) -> impl Subscriber + Sync + Send ) -> impl Subscriber + Send + Sync
where where
Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static, Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
{ {
let env_filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter)); let env_filter =
EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
let formatting_layer = BunyanFormattingLayer::new(name, sink); let formatting_layer = BunyanFormattingLayer::new(name, sink);
Registry::default() Registry::default()
.with(env_filter) .with(env_filter)
@ -20,8 +20,7 @@ where
.with(formatting_layer) .with(formatting_layer)
} }
/// Register a subscriber as global default to process span data. pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
pub fn init_subscriber(subscriber: impl Subscriber + Sync + Send) { LogTracer::init().expect("Failed to set logger.");
LogTracer::init().expect("Failed to set logger"); set_global_default(subscriber).expect("Failed to set subscriber.");
set_global_default(subscriber).expect("Failed to set subscriber"); }
}

View File

@ -1,176 +1,145 @@
use std::net::TcpListener; use mail_app::configuration::{get_configuration, DatabaseSettings};
use sqlx::{Connection, Executor, PgConnection, PgPool}; use mail_app::startup::run;
use uuid::Uuid; use mail_app::telemetry::{get_subscriber, init_subscriber};
use once_cell::sync::Lazy; use once_cell::sync::Lazy;
use sqlx::{Connection, Executor, PgConnection, PgPool};
use mail_app::startup::run; use std::net::TcpListener;
use mail_app::configuration::{get_configuration, DatabaseSettings}; use uuid::Uuid;
use mail_app::telemetry::{get_subscriber, init_subscriber};
static TRACING: Lazy<()> = Lazy::new(|| {
pub struct TestApp { let default_filter_level = "info".to_string();
pub address: String, let subscriber_name = "test".to_string();
pub db_pool: PgPool, if std::env::var("TEST_LOG").is_ok() {
} let subscriber = get_subscriber(subscriber_name, default_filter_level, std::io::stdout);
init_subscriber(subscriber);
static TRACING: Lazy<()> = Lazy::new(|| { } else {
let default_filter_level = "info".to_string(); let subscriber = get_subscriber(subscriber_name, default_filter_level, std::io::sink);
let subscriber_name = "test".to_string(); init_subscriber(subscriber);
if std::env::var("TEST_LOG").is_ok() { };
let subscriber = get_subscriber(subscriber_name, default_filter_level, std::io::stdout); });
init_subscriber(subscriber);
} else { pub struct TestApp {
let subscriber = get_subscriber(subscriber_name, default_filter_level, std::io::sink); pub address: String,
init_subscriber(subscriber); pub db_pool: PgPool,
}; }
});
async fn spawn_app() -> TestApp {
// Create new instance of the application on a random port and return address [`http://localhost:XXXX`] Lazy::force(&TRACING);
async fn spawn_app() -> TestApp {
Lazy::force(&TRACING); let listener = TcpListener::bind("127.0.0.1:0").expect("Failed to bind to random port.");
let port = listener.local_addr().unwrap().port();
let listener = TcpListener::bind("127.0.0.1:0") let address = format!("http://127.0.0.1:{}", port);
.expect("Failed to bind to random port");
let port = listener.local_addr().unwrap().port(); let mut configuration = get_configuration().expect("Failed to read configuration");
let address = format!("http://127.0.0.1:{}", port); configuration.database.database_name = Uuid::new_v4().to_string();
let connection_pool = configure_database(&configuration.database).await;
let mut configuration = get_configuration()
.expect("Failed to read configuration."); let server = run(listener, connection_pool.clone()).expect("Failed to bind address");
configuration.database.database_name = Uuid::new_v4().to_string(); // Adjust database string to be random! // Launch in background
let connection_pool = configure_database(&configuration.database).await; let _ = tokio::spawn(server);
let server = run(listener, connection_pool.clone()) TestApp {
.expect("Failed to bind address"); address,
let _ = tokio::spawn(server); db_pool: connection_pool,
TestApp { }
address, }
db_pool: connection_pool,
} pub async fn configure_database(config: &DatabaseSettings) -> PgPool {
} // Create Database
let mut connection = PgConnection::connect_with(&config.without_db())
pub async fn configure_database(config: &DatabaseSettings) -> PgPool { .await
// Create database .expect("Failed to connect to Postgres.");
let mut connection = PgConnection::connect_with(&config.without_db()) connection
.await .execute(format!(r#"CREATE DATABASE "{}";"#, config.database_name).as_str())
.expect("Failed to connect to Postgres"); .await
connection .expect("Failed to create database.");
.execute(&*format!(r#"CREATE DATABASE "{}";"#, config.database_name))
.await // Migrate Database
.expect("Failed to create database."); let connection_pool = PgPool::connect_with(config.with_db())
.await
// Migrate database .expect("Failed to connect to Postgres.");
let connection_pool = PgPool::connect_with(config.with_db()) sqlx::migrate!("./migrations")
.await .run(&connection_pool)
.expect("Failed to connect to Postgres."); .await
sqlx::migrate!("./migrations") .expect("Failed to migrate the database.");
.run(&connection_pool)
.await // Return connection pool
.expect("Failed to migrate the database"); connection_pool
}
connection_pool
} #[tokio::test]
async fn health_check_works() {
#[tokio::test] // Arrange
async fn health_check_works() { let app = spawn_app().await;
// Arrange let client = reqwest::Client::new();
let app = spawn_app().await;
let client = reqwest::Client::new(); // Act
let response = client
// Act .get(&format!("{}/health_check", &app.address))
let response = client .send()
.get(&format!("{}/health_check", &app.address)) .await
.send() .expect("Failed to execute request");
.await
.expect("Failed to execute request."); // Assert
assert!(response.status().is_success());
// Assert our test assert_eq!(Some(0), response.content_length());
assert!(response.status().is_success()); }
assert_eq!(Some(0), response.content_length());
} #[tokio::test]
async fn subscribe_returns_a_200_for_valid_form_data() {
#[tokio::test] // Arrange
async fn subscribe_returns_200_for_valid_form_data() { let app = spawn_app().await;
// Arrange let client = reqwest::Client::new();
let app = spawn_app().await;
let client = reqwest::Client::new(); // Act
let body = "name=le%20guin&email=ursula_le_guin%40gmail.com"; let body = "name=le%20guin&email=ursula_le_guin%40gmail.com";
let response = client
// Act .post(&format!("{}/subscriptions", &app.address))
let response = client .header("Content-Type", "application/x-www-form-urlencoded")
.post(&format!("{}/subscriptions", &app.address)) .body(body)
.header("Content-Type", "application/x-www-form-urlencoded") .send()
.body(body) .await
.send() .expect("Failed to execute request.");
.await
.expect("Failed to execute request."); // Assert
assert_eq!(200, response.status().as_u16());
// Assert test
assert_eq!(200, response.status().as_u16()); let saved = sqlx::query!("SELECT email, name FROM subscriptions",)
.fetch_one(&app.db_pool)
let saved = sqlx::query!("SELECT email, name FROM subscriptions",) .await
.fetch_one(&app.db_pool) .expect("Failed to fetch saved subscription.");
.await
.expect("Failed to fetch saved subscription."); assert_eq!(saved.email, "ursula_le_guin@gmail.com");
assert_eq!(saved.name, "le guin");
assert_eq!(saved.email, "ursula_le_guin@gmail.com"); }
assert_eq!(saved.name, "le guin");
} #[tokio::test]
async fn subscribe_returns_a_400_when_data_is_missing() {
#[tokio::test] // Arrange
async fn subscribe_returns_400_for_missing_form_data() { let app = spawn_app().await;
//Arrange let client = reqwest::Client::new();
let app = spawn_app().await; let test_cases = vec![
let client = reqwest::Client::new(); ("name=le%20guin", "missing email"),
let test_cases = vec![ ("email=ursula_le_guin%40gmail.com", "missing name"),
("name=le%20guin", "missing the email"), ("", "missing both name and email"),
("email=ursula_le_guin%40gmail.com", "missing the name"), ];
("", "missing both name and email")
]; for (invalid_body, error_message) in test_cases {
// Act
for (invalid_body, error_message) in test_cases { let response = client
// Act .post(&format!("{}/subscriptions", &app.address))
let response = client .header("Content-Type", "application/x-www-form-urlencoded")
.post(&format!("{}/subscriptions", &app.address)) .body(invalid_body)
.header("Content-Type", "application/x-www-form-urlencoded") .send()
.body(invalid_body) .await
.send() .expect("Failed to execute request.");
.await
.expect("Failed to execute request."); // Assert
assert_eq!(
// Assert 400,
assert_eq!( response.status().as_u16(),
400, "The API id not fail wth 400 Bad Request when the payload was {}.",
response.status().as_u16(), error_message
// Customised error message on test failure );
"The API did not fail with 400 Bad Request when the payload was {}.", error_message }
); }
}
}
/// Fields that are present but empty (or syntactically invalid) must be
/// rejected with 400, not accepted as blank subscriptions.
#[tokio::test]
async fn subscribe_returns_a_400_when_fields_are_present_but_empty() {
    // Arrange
    let app = spawn_app().await;
    let client = reqwest::Client::new();
    let test_cases = vec![
        ("name=&email=ursula_le_guin%40gmail.com", "empty name"),
        ("name=Ursula&email=", "empty email"),
        ("name=Ursula&email=definitely-not-an-email", "invalid email"),
    ];

    for (body, description) in test_cases {
        // Act
        let response = client
            .post(&format!("{}/subscriptions", &app.address))
            .header("Content-Type", "application/x-www-form-urlencoded")
            .body(body)
            .send()
            .await
            .expect("Failed to execute request.");

        // Assert
        assert_eq!(
            400,
            response.status().as_u16(),
            "The API did not return a 400 Bad Request when the payload was {}.",
            description
        );
    }
}