Compare commits: da977cd720...master (12 commits)

23b089bf3d
fda6c7c044
d96eae1fec
96a6b6a351
daf914bb8e
7b5fa61780
444e42351e
3a0576ba48
9336235b64
70a4eb23c6
ccf49ee214
2c9dab01bb
.sqlx/query-4bd6bbade521cd577279e91d8a8b978748046beff031d153699b351089c3bf9b.json (generated, new file, 17 lines)
@@ -0,0 +1,17 @@
{
  "db_name": "PostgreSQL",
  "query": "\n INSERT INTO subscriptions (id, email, name, subscribed_at)\n VALUES ($1, $2, $3, $4)\n ",
  "describe": {
    "columns": [],
    "parameters": {
      "Left": [
        "Uuid",
        "Text",
        "Text",
        "Timestamptz"
      ]
    },
    "nullable": []
  },
  "hash": "4bd6bbade521cd577279e91d8a8b978748046beff031d153699b351089c3bf9b"
}
@@ -51,7 +51,8 @@ windows:
      panes:
        - lvim
  - test:
      layout: main-vertical
      layout: main-horizontal
      root: .
      panes:
        - cargo watch -x check -x test -x run
        - sleep 10 && cargo watch -x check -x test -x run
        - ./scripts/init_db.sh
Cargo.lock (generated, 1332 lines changed)
File diff suppressed because it is too large.
Cargo.toml (22 lines changed)
@@ -3,7 +3,8 @@ name = "email_newsletter_api"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[profile.bench]
debug = true

[lib]
path = "src/lib.rs"
@@ -16,12 +17,21 @@ edition = "2021"

[dependencies]
actix-web = "4.5.1"
reqwest = "0.12.2"
serde = { version = "1.0.197", features = ["derive"] }
tokio = { version = "1.36.0", features = ["full"] }
config = "0.13"
uuid = { version = "1.8.0", features = ["v4"] }
chrono = { version = "0.4.38", default-features = false, features = ["clock"] }
tracing = { version = "0.1.40", features = ["log"] }
tracing-subscriber = { version = "0.3.18", features = ["registry", "env-filter"] }
tracing-bunyan-formatter = "0.3.9"
tracing-log = "0.2.0"
secrecy = { version = "0.8.0", features = ["serde"] }
tracing-actix-web = "0.7.10"
h2 = "0.3.26"
serde-aux = "4.5.0"
unicode-segmentation = "1.11.0"
validator = { version = "0.18.1", features = ["derive"] }

[dependencies.sqlx]
version = "0.7"
@@ -34,3 +44,11 @@ features = [
    "chrono",
    "migrate"
]

[dev-dependencies]
fake = "~2.3"
claims = "0.7.1"
once_cell = "1.19.0"
reqwest = "0.12.2"
quickcheck = "0.9.2"
quickcheck_macros = "0.9.1"
Dockerfile.production (new file, 43 lines)
@@ -0,0 +1,43 @@
# Using the `rust-musl-builder` as base image, instead of
# the official Rust toolchain
FROM clux/muslrust:stable AS chef
USER root

RUN cargo install cargo-chef

WORKDIR /app

FROM chef AS planner

COPY . .

RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json

# Notice that we are specifying the --target flag!

RUN cargo chef cook --release --target x86_64-unknown-linux-musl --recipe-path recipe.json

COPY . .

ENV SQLX_OFFLINE true

RUN cargo build --release --target x86_64-unknown-linux-musl --bin email_newsletter_api

FROM alpine AS runtime

WORKDIR /app

RUN addgroup -S myuser && adduser -S myuser -G myuser

COPY --from=builder /app/target/x86_64-unknown-linux-musl/release/email_newsletter_api email_newsletter_api

COPY configuration configuration

USER myuser

ENV APP_ENVIRONMENT production

ENTRYPOINT ["./email_newsletter_api"]
@@ -9,4 +9,12 @@
- Run `cargo watch -x check -x test -x run` to lint, test and run the binary as soon as you make a change to a file.
- Bonus: install and use `mold`, a very fast linker that can link your Rust binary _blazingly fast_.

## Notable Dependencies

- `actix-web`: Most popular Rust web framework
- `serde`: Data structure serialization/deserialization
- `tokio`: Async runtime
- `tracing`: Alternative to traditional logging
- `sqlx`: SQL toolkit for Rust; offers compile-time checked SQL queries

## [Technical Write Up](./docs/technical_write_up.md)
@@ -1,6 +1,8 @@
application_port: 8000
application:
  port: 8000
  host: 0.0.0.0
database:
  host: "127.0.0.1"
  host: "localhost"
  port: 5432
  username: "postgres"
  password: "password"
configuration/local.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
application:
  host: 127.0.0.1
database:
  require_ssl: false
configuration/production.yaml (new file, 4 lines)
@@ -0,0 +1,4 @@
application:
  host: 0.0.0.0
database:
  require_ssl: true
@@ -3,3 +3,8 @@
## SQLx

The SQLx library runs compile-time checks to make sure our SQL queries are valid. It does this by running the queries against PostgreSQL at compile time, so the `DATABASE_URL` environment variable must be set correctly.

### Offline mode vs online mode

- Online mode: the database is up and running, so `SQLx` can check queries against it at compile time.
- Offline mode: the database is not running. Instead, query metadata saved ahead of time (the `.sqlx/query-*.json` files) lets the project build without SQLx complaining (see the sketch after this list).
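Not part of this compare, but a minimal sketch of what those checks apply to, assuming the `subscriptions` table targeted by the query metadata above and an existing `PgPool`. The macro is verified against the live database when `DATABASE_URL` is set, or against metadata saved by `cargo sqlx prepare` when building offline; the helper name is hypothetical.

```rust
use chrono::Utc;
use sqlx::PgPool;
use uuid::Uuid;

// Hypothetical helper for illustration; the real insert lives in the
// subscriptions route handler further down in this compare.
pub async fn record_subscription(
    pool: &PgPool,
    email: &str,
    name: &str,
) -> Result<(), sqlx::Error> {
    // sqlx::query! type-checks this statement at compile time,
    // either online (DATABASE_URL) or from cached .sqlx metadata.
    sqlx::query!(
        r#"
        INSERT INTO subscriptions (id, email, name, subscribed_at)
        VALUES ($1, $2, $3, $4)
        "#,
        Uuid::new_v4(),
        email,
        name,
        Utc::now()
    )
    .execute(pool)
    .await?;
    Ok(())
}
```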
@@ -2,11 +2,12 @@

## Routes

- `health_check`: returns a HTTP code 200 if the server is up and running. Response body is empty.
- `subscribe` returns a HTTP code 200 if the user successfully subscribed to our email newsletter service. 400 if data is missing or invalid.
- `health_check`: GET - returns HTTP 200 if the server is up and running. The response body is empty.
- `subscriptions`: POST - returns HTTP 200 if the user successfully subscribed to our email newsletter service, 400 if data is missing or invalid (see the client sketch below this list).
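The client sketch below is not part of this diff; it just shows the renamed endpoint in use. The base URL assumes the local configuration (`127.0.0.1:8000`), and the form body matches the shape used by the integration tests further down.

```rust
// Hypothetical client-side example; assumes the API is running locally on port 8000.
#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let response = client
        .post("http://127.0.0.1:8000/subscriptions")
        .header("Content-Type", "application/x-www-form-urlencoded")
        .body("name=le%20guin&email=ursula_le_guin%40gmail.com")
        .send()
        .await?;

    // 200 on success; 400 when the form data is missing or invalid.
    assert_eq!(200, response.status().as_u16());
    Ok(())
}
```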
## Other topics

- [Tracing](./tracing.md)
- [Database (PostgreSQL)](./database.md)
- [Testing](./technical_write_up.md)
- [Actix-web](./actix_web.md)
docs/tracing.md (new file, 24 lines)
@@ -0,0 +1,24 @@
# Tracing

Logs only record isolated events; traces record those events plus the start and end of the unit of work they belong to, which makes them much easier to follow.

## The flow

- Create a new span and attach some values to it as key-value pairs.
- We explicitly step into the span with `.enter()`.
- `.enter()` returns an instance of `Entered`, a guard: as long as the guard variable is not dropped, all downstream spans and log events are registered as children of the entered span. The guard is dropped automatically when it goes out of scope.

## Notations
- We enter the span (`->`);
- We exit the span (`<-`);
- We finally close the span (`--`).

## Instrumenting

When we run an async task, the async executor (in our case, the `tokio` runtime) has to poll its future multiple times to drive it to completion; while that future is idle, the executor works on other futures.

We therefore need a way to avoid mixing up the spans of different futures. This is where instrumentation comes in: `Instrument` is an extension trait for futures, and `Instrument::instrument` does exactly what we want. It enters the span we pass as an argument every time `self`, the future, is polled, and exits the span every time the future is parked.

## Notes

- We can enter and exit a span multiple times, but we can only close it once. This fits async tasks well, since they are entered and resumed repeatedly.
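A minimal sketch of the flow described in docs/tracing.md above. The span names and fields are made up for illustration; only `tracing` and `tokio` APIs already used by this project are assumed.

```rust
use tracing::Instrument;

async fn do_work(user_id: u64) {
    // Create a span and attach key-value pairs to it.
    let span = tracing::info_span!("do_work", user_id);

    async move {
        // Events emitted while this future is polled are recorded
        // as children of the `do_work` span.
        tracing::info!("starting");
        tokio::time::sleep(std::time::Duration::from_millis(10)).await;
        tracing::info!("done");
    }
    // `instrument` enters the span each time the future is polled
    // and exits it each time the future is parked.
    .instrument(span)
    .await;
}

fn sync_example() {
    let span = tracing::info_span!("sync_work", step = 1);
    // `enter` returns a guard; while it is alive, downstream events are
    // children of `sync_work`. Dropping the guard exits the span.
    let _guard = span.enter();
    tracing::info!("inside the span");
}

#[tokio::main]
async fn main() {
    // Subscriber setup omitted here; see src/telemetry.rs in this compare.
    // Without a subscriber installed, the events are simply discarded.
    do_work(42).await;
    sync_example();
}
```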
@@ -1,41 +1,113 @@
use secrecy::{ExposeSecret, Secret};
use serde_aux::field_attributes::deserialize_number_from_string;
use sqlx::postgres::{PgConnectOptions, PgSslMode};
use sqlx::ConnectOptions;

#[derive(serde::Deserialize)]
pub struct Settings {
    pub database: DatabaseSettings,
    pub application_port: u16,
    pub application: ApplicationSettings,
}

#[derive(serde::Deserialize)]
pub struct ApplicationSettings {
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub port: u16,
    pub host: String,
}

#[derive(serde::Deserialize)]
pub struct DatabaseSettings {
    pub username: String,
    pub password: String,
    pub password: Secret<String>,
    #[serde(deserialize_with = "deserialize_number_from_string")]
    pub port: u16,
    pub host: String,
    pub database_name: String,
    pub require_ssl: bool,
}

pub fn get_configuration() -> Result<Settings, config::ConfigError> {
    let base_path = std::env::current_dir().expect("Failed to determine the current directory");
    let configuration_directory = base_path.join("configuration");

    let environment: Environment = std::env::var("APP_ENVIRONMENT")
        .unwrap_or_else(|_| "local".into())
        .try_into()
        .expect("Failed to parse APP_ENVIRONMENT.");

    let environment_filename = format!("{}.yaml", environment.as_str());

    let settings = config::Config::builder()
        .add_source(config::File::new(
            "configuration.yaml",
            config::FileFormat::Yaml,
        .add_source(config::File::from(
            configuration_directory.join("base.yaml"),
        ))
        .add_source(config::File::from(
            configuration_directory.join(environment_filename),
        ))
        // take settings from environment variables
        // with a prefix of APP and __ as separator
        //
        // E.g `APP_APPLICATION_PORT=5001` would set Settings.application.port
        .add_source(
            config::Environment::with_prefix("APP")
                .prefix_separator("_")
                .separator("__"),
        )
        .build()?;

    settings.try_deserialize::<Settings>()
}

impl DatabaseSettings {
    pub fn connection_string(&self) -> String {
        format!(
            "postgres://{}:{}@{}:{}/{}",
            self.username, self.password, self.host, self.port, self.database_name
        )
/// The possible runtime environment for our application.
pub enum Environment {
    Local,
    Production,
}
impl Environment {
    pub fn as_str(&self) -> &'static str {
        match self {
            Environment::Local => "local",
            Environment::Production => "production",
        }

    pub fn connection_string_without_db(&self) -> String {
        format!(
            "postgres://{}:{}@{}:{}",
            self.username, self.password, self.host, self.port
        )
    }
}
impl TryFrom<String> for Environment {
    type Error = String;
    fn try_from(s: String) -> Result<Self, Self::Error> {
        match s.to_lowercase().as_str() {
            "local" => Ok(Self::Local),
            "production" => Ok(Self::Production),
            other => Err(format!(
                "{} is not a supported environment. \
                Use either `local` or `production`.",
                other
            )),
        }
    }
}

impl DatabaseSettings {
    // for normal usage
    pub fn with_db(&self) -> PgConnectOptions {
        let mut options = self.without_db().database(&self.database_name);
        options = options.log_statements(tracing_log::log::LevelFilter::Trace);
        options
    }

    // for testing, we will set the database name to arbitrary values
    pub fn without_db(&self) -> PgConnectOptions {
        let ssl_mode = if self.require_ssl {
            PgSslMode::Require
        } else {
            PgSslMode::Prefer
        };

        PgConnectOptions::new()
            .host(&self.host)
            .username(&self.username)
            .password(self.password.expose_secret())
            .port(self.port)
            .ssl_mode(ssl_mode)
    }
}
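A usage sketch of the layered configuration above, not part of this diff. The value is hypothetical, and it assumes the binary is run from the repository root so the `configuration/` directory resolves.

```rust
use email_newsletter_api::configuration::get_configuration;

fn main() {
    // `APP_DATABASE__PORT` maps to Settings.database.port, mirroring the
    // `config::Environment::with_prefix("APP").separator("__")` source above.
    std::env::set_var("APP_DATABASE__PORT", "5433");

    let settings = get_configuration().expect("Failed to read configuration");
    assert_eq!(settings.database.port, 5433);
}
```

The `deserialize_number_from_string` attribute on `port` is what lets the string-valued environment variable deserialize into a `u16`.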
src/domain/mod.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
mod new_subscriber;
mod subscriber_email;
mod subscriber_name;

pub use new_subscriber::NewSubscriber;
pub use subscriber_email::SubscriberEmail;
pub use subscriber_name::SubscriberName;
src/domain/new_subscriber.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
use crate::domain::subscriber_email::SubscriberEmail;
use crate::domain::subscriber_name::SubscriberName;

pub struct NewSubscriber {
    pub email: SubscriberEmail,
    pub name: SubscriberName,
}
src/domain/subscriber_email.rs (new file, 42 lines)
@@ -0,0 +1,42 @@
use validator::ValidateEmail;

#[derive(Debug)]
pub struct SubscriberEmail(String);

impl SubscriberEmail {
    pub fn parse(string_input: String) -> Result<SubscriberEmail, String> {
        if string_input.validate_email() {
            Ok(Self(string_input))
        } else {
            Err(format!("{} is not a valid email.", string_input))
        }
    }
}

impl AsRef<str> for SubscriberEmail {
    fn as_ref(&self) -> &str {
        &self.0
    }
}

#[cfg(test)]
mod tests {
    use crate::domain::subscriber_email::SubscriberEmail;
    use fake::faker::internet::en::SafeEmail;
    use fake::Fake;

    #[derive(Debug, Clone)]
    struct ValidEmailFixture(pub String);

    impl quickcheck::Arbitrary for ValidEmailFixture {
        fn arbitrary<G: quickcheck::Gen>(g: &mut G) -> Self {
            let email = SafeEmail().fake_with_rng(g);
            Self(email)
        }
    }

    #[quickcheck_macros::quickcheck]
    fn valid_emails_are_parsed_successfully(valid_email: ValidEmailFixture) -> bool {
        SubscriberEmail::parse(valid_email.0).is_ok()
    }
}
src/domain/subscriber_name.rs (new file, 69 lines)
@@ -0,0 +1,69 @@
use unicode_segmentation::UnicodeSegmentation;

#[derive(Debug)]
pub struct SubscriberName(String);

impl SubscriberName {
    pub fn parse(string_input: String) -> Result<SubscriberName, String> {
        let is_empty_or_whitespace = string_input.trim().is_empty();

        let is_too_long = string_input.graphemes(true).count() > 256;

        let forbidden_chars = ['/', '(', ')', '"', '<', '>', '\\', '{', '}'];

        let contains_fobidden_chars = string_input.chars().any(|g| forbidden_chars.contains(&g));

        if is_empty_or_whitespace || is_too_long || contains_fobidden_chars {
            Err(format!("{} is not a valid subscriber name.", string_input))
        } else {
            Ok(Self(string_input))
        }
    }
}

impl AsRef<str> for SubscriberName {
    fn as_ref(&self) -> &str {
        &self.0
    }
}

#[cfg(test)]
mod tests {
    use crate::domain::SubscriberName;
    use claims::{assert_err, assert_ok};

    #[test]
    fn a_256_grapheme_long_name_is_valid() {
        let name = "ё".repeat(256);
        assert_ok!(SubscriberName::parse(name));
    }

    #[test]
    fn a_name_longer_than_256_graphemes_is_rejected() {
        let name = "a".repeat(257);
        assert_err!(SubscriberName::parse(name));
    }

    #[test]
    fn whitespace_only_names_are_rejected() {
        let name = " ".to_string();
        assert_err!(SubscriberName::parse(name));
    }
    #[test]
    fn empty_string_is_rejected() {
        let name = "".to_string();
        assert_err!(SubscriberName::parse(name));
    }
    #[test]
    fn names_containing_an_invalid_character_are_rejected() {
        for name in &['/', '(', ')', '"', '<', '>', '\\', '{', '}'] {
            let name = name.to_string();
            assert_err!(SubscriberName::parse(name));
        }
    }
    #[test]
    fn a_valid_name_is_parsed_successfully() {
        let name = "Ursula Le Guin".to_string();
        assert_ok!(SubscriberName::parse(name));
    }
}
@@ -1,3 +1,5 @@
pub mod configuration;
pub mod domain;
pub mod routes;
pub mod startup;
pub mod telemetry;
src/main.rs (26 lines changed)
@@ -1,20 +1,32 @@
use std::net::TcpListener;

use email_newsletter_api::telemetry::{get_subscriber, init_subscriber};
use email_newsletter_api::{configuration::get_configuration, startup};
use sqlx::PgPool;
use sqlx::postgres::PgPoolOptions;

#[tokio::main]
async fn main() -> Result<(), std::io::Error> {
    let configuration = get_configuration().expect("Failed to read configuration");

    let db_conn = PgPool::connect(&configuration.database.connection_string())
        .await
        .expect("Failed to connect to PostgreSQL");
    let subscriber = get_subscriber(
        "email_newsletter_api".into(),
        "info".into(),
        std::io::stdout,
    );
    init_subscriber(subscriber);

    let port_number = configuration.application_port;
    let db_conn = PgPoolOptions::new().connect_lazy_with(configuration.database.with_db());

    let listener = TcpListener::bind(format!("127.0.0.1:{}", port_number))
        .unwrap_or_else(|_| panic!("Can't bind to port {} at localhost", port_number));
    let listener = TcpListener::bind(format!(
        "{}:{}",
        configuration.application.host, configuration.application.port
    ))
    .unwrap_or_else(|_| {
        panic!(
            "Can't bind to port {} at localhost",
            configuration.application.port
        )
    });

    // Move the error up the call stack
    // otherwise await for the HttpServer
@@ -1,3 +1,4 @@
use crate::domain::{NewSubscriber, SubscriberEmail, SubscriberName};
use actix_web::{web, HttpResponse};
use chrono::Utc;
use sqlx::PgPool;
@@ -9,27 +10,64 @@ pub struct FormData {
    name: String,
}

impl TryFrom<FormData> for NewSubscriber {
    type Error = String;

    fn try_from(value: FormData) -> Result<Self, Self::Error> {
        let name = SubscriberName::parse(value.name)?;
        let email = SubscriberEmail::parse(value.email)?;
        Ok(Self { email, name })
    }
}

#[tracing::instrument(
    name = "Adding a new subscriber",
    // function args aren't really relevant to the span
    skip(form, db_conn_pool),
    fields(
        subscriber_email = %form.email,
        subscriber_name = %form.name
    )
)]
pub async fn subscribe_route(
    form: web::Form<FormData>,
    db_conn_pool: web::Data<PgPool>,
) -> HttpResponse {
    match sqlx::query!(
    let new_subscriber = match form.0.try_into() {
        Ok(form) => form,
        Err(_) => return HttpResponse::BadRequest().finish(),
    };
    match insert_subscriber(&db_conn_pool, &new_subscriber).await {
        Ok(_) => HttpResponse::Ok().finish(),
        Err(_) => HttpResponse::InternalServerError().finish(),
    }
}

#[tracing::instrument(
    name = "Saving new subscriber details in the database",
    skip(new_subscriber, pool)
)]
pub async fn insert_subscriber(
    pool: &PgPool,
    new_subscriber: &NewSubscriber,
) -> Result<(), sqlx::Error> {
    sqlx::query!(
        r#"
        INSERT INTO subscriptions (id, email, name, subscribed_at)
        VALUES ($1, $2, $3, $4)
        "#,
        Uuid::new_v4(),
        form.email,
        form.name,
        new_subscriber.email.as_ref(),
        new_subscriber.name.as_ref(),
        Utc::now()
    )
    .execute(db_conn_pool.get_ref())
    .execute(pool)
    .await
    {
        Ok(_) => HttpResponse::Ok().finish(),
        Err(e) => {
            println!("Failed to execute query: {}", e);
            HttpResponse::InternalServerError().finish()
        }
    }
    .map_err(|e| {
        // Using the `?` operator to return early
        // if the function failed, returning a sqlx::Error
        tracing::error!("Failed to execute query: {:?}", e);
        e
    })?;
    Ok(())
}
@@ -3,6 +3,7 @@ use actix_web::dev::Server;
use actix_web::{web, App, HttpServer};
use sqlx::PgPool;
use std::net::TcpListener;
use tracing_actix_web::TracingLogger;

pub fn run(listener: TcpListener, db_conn_pool: PgPool) -> Result<Server, std::io::Error> {
    // under the hood, web::Data::new will create an Arc
@@ -11,8 +12,9 @@ pub fn run(listener: TcpListener, db_conn_pool: PgPool) -> Result<Server, std::i

    let server = HttpServer::new(move || {
        App::new()
            .wrap(TracingLogger::default())
            .route("/health_check", web::get().to(healthcheck_route))
            .route("/subscribe", web::post().to(subscribe_route))
            .route("/subscriptions", web::post().to(subscribe_route))
            .app_data(db_conn_pool.clone())
    })
    .listen(listener)?
src/telemetry.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
use tracing::subscriber::set_global_default;
use tracing::Subscriber;
use tracing_bunyan_formatter::{BunyanFormattingLayer, JsonStorageLayer};
use tracing_log::LogTracer;
use tracing_subscriber::{fmt::MakeWriter, layer::SubscriberExt, EnvFilter, Registry};

pub fn get_subscriber<Sink>(
    name: String,
    env_filter: String,
    sink: Sink,
) -> impl Subscriber + Send + Sync
where
    Sink: for<'a> MakeWriter<'a> + Send + Sync + 'static,
{
    let env_filter =
        EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new(env_filter));
    let formatting_layer = BunyanFormattingLayer::new(name, sink);
    Registry::default()
        .with(env_filter)
        .with(JsonStorageLayer)
        .with(formatting_layer)
}

// init_subscriber should only be called once
//
// This is solved with the once_cell crate
// until the std::sync::SyncOnceCell is stable in the toolchain
pub fn init_subscriber(subscriber: impl Subscriber + Send + Sync) {
    LogTracer::init().expect("Failed to set logger");
    set_global_default(subscriber).expect("Failed to set subscriber");
}
@@ -11,7 +11,7 @@ async fn subscribe_returns_a_200_for_valid_form_data() {
    let body = "name=le%20test&email=le_test%40gmail.com";

    let response = client
        .post(&format!("{}/subscribe", &test_app.address))
        .post(&format!("{}/subscriptions", &test_app.address))
        .header("Content-Type", "application/x-www-form-urlencoded")
        .body(body)
        .send()
@@ -44,7 +44,7 @@ async fn subscribe_returns_a_400_when_data_is_missing() {

    for (invalid_body, error_message) in test_cases {
        let response = client
            .post(&format!("{}/subscribe", &test_app.address))
            .post(&format!("{}/subscriptions", &test_app.address))
            .header("Content-Type", "application/x-www-form-urlencoded")
            .body(invalid_body)
            .send()
@@ -59,3 +59,33 @@ async fn subscribe_returns_a_400_when_data_is_missing() {
        )
    }
}

#[tokio::test]
async fn subscribe_returns_a_400_when_fields_are_present_but_invalid() {
    // Arrange
    let app = spawn_app().await;
    let client = reqwest::Client::new();
    let test_cases = vec![
        ("name=&email=ursula_le_guin%40gmail.com", "empty name"),
        ("name=Ursula&email=", "empty email"),
        ("name=Ursula&email=definitely-not-an-email", "invalid email"),
    ];

    for (invalid_body, error_message) in test_cases {
        // Act
        let response = client
            .post(&format!("{}/subscriptions", &app.address))
            .header("Content-Type", "application/x-www-form-urlencoded")
            .body(invalid_body)
            .send()
            .await
            .expect("Failed to execute request.");

        assert_eq!(
            400,
            response.status().as_u16(),
            "The API did not return a 400 Bad Request when the payload was {}.",
            error_message
        );
    }
}
@@ -1,8 +1,22 @@
use email_newsletter_api::configuration::{get_configuration, DatabaseSettings};
use email_newsletter_api::{
    configuration::{get_configuration, DatabaseSettings},
    telemetry::{get_subscriber, init_subscriber},
};
use once_cell::sync::Lazy;
use sqlx::{Connection, Executor, PgConnection, PgPool};
use std::net::TcpListener;
use uuid::Uuid;

static TRACING: Lazy<()> = Lazy::new(|| {
    if std::env::var("TEST_LOG").is_ok() {
        let subscriber = get_subscriber("test".into(), "info".into(), std::io::stdout);
        init_subscriber(subscriber);
    } else {
        let subscriber = get_subscriber("test".into(), "debug".into(), std::io::sink);
        init_subscriber(subscriber);
    }
});

pub struct TestApp {
    pub address: String,
    pub db_pool: PgPool,
@@ -10,6 +24,8 @@ pub struct TestApp {

#[allow(clippy::let_underscore_future)]
pub async fn spawn_app() -> TestApp {
    Lazy::force(&TRACING);

    /* Spawn an app server with a TcpListener bound to localhost:<random port>
     *
     * Returns a valid IPv4 string (i.e. localhost:8080)
@@ -41,7 +57,7 @@ pub async fn spawn_app() -> TestApp {
}

pub async fn configure_test_database(db_config: &DatabaseSettings) -> PgPool {
    let mut connection = PgConnection::connect(&db_config.connection_string_without_db())
    let mut connection = PgConnection::connect_with(&db_config.without_db())
        .await
        .expect("Failed to connect to Postgres");

@@ -50,7 +66,7 @@ pub async fn configure_test_database(db_config: &DatabaseSettings) -> PgPool {
        .await
        .expect("Failed to create database");

    let conn_pool = PgPool::connect(&db_config.connection_string())
    let conn_pool = PgPool::connect_with(db_config.with_db())
        .await
        .expect("Failed to connect to PostgreSQL pool");