Compare commits

...

28 Commits

Author SHA1 Message Date
Martin Berg Alstad
de3882c8b1
🧹 Removed unused dependencies
Some checks failed
Build & test / build (push) Failing after 4s
Signed-off-by: Martin Berg Alstad <600878@stud.hvl.no>
2024-12-22 16:30:08 +01:00
61da72936e
Changed path to name of git repository, to work when cloning
Signed-off-by: Martin Berg Alstad <600878@stud.hvl.no>
2024-12-22 16:24:23 +01:00
Martin Berg Alstad
7e2df67fee Comments.
Coverage to Makefile.toml.

Added branch to map! to allow for instantiating with default values.
2024-08-31 17:49:27 +02:00
Martin Berg Alstad
8fb89e0459 Makefile.toml
TestContainers and diesel test database
2024-08-31 12:21:59 +02:00
Martin Berg Alstad
ce770e9c6f Comments 2024-08-27 00:21:25 +02:00
Martin Berg Alstad
cd99466266 Added dependency on mime 2024-08-27 00:04:21 +02:00
Martin Berg Alstad
93fe1e0cda Build method to AppBuilder that returns a Router of all settings.
Updated router! to allow for generic types
2024-08-26 17:14:39 +02:00
Martin Berg Alstad
eeab1257e3 InsertMany function for create trait.
Changed path to DateTimeInterval.

Changed lifetimes on derives with async_trait
2024-08-25 17:56:56 +02:00
Martin Berg Alstad
762330189d More derives for types.
Added new_safe constructor for Interval type
2024-08-25 00:55:14 +02:00
Martin Berg Alstad
17c81f4da1 Rewritten diesel_crud to be more flexible 2024-08-24 19:29:54 +02:00
Martin Berg Alstad
3318aacf7c Added more derives to DateTimeInterval 2024-08-24 19:29:19 +02:00
Martin Berg Alstad
00e894140f Added Wrapper types and DateTimeInterval 2024-08-19 23:15:55 +02:00
Martin Berg Alstad
ae775f4e9e DieselCrud traits and derives 2024-08-19 19:55:01 +02:00
Martin Berg Alstad
2c8577a11d Added layer to builder. Comments 2024-08-19 16:43:31 +02:00
Martin Berg Alstad
7eb675c210 Updated dependencies. Tower from 0.4 -> 0.5 2024-08-19 14:18:51 +02:00
Martin Berg Alstad
10d8535b27 Replaced From<T> with from! for BaseResponse 2024-08-19 14:15:29 +02:00
Martin Berg Alstad
cd558eebfa Added From<T> for BaseResponse 2024-08-14 20:37:10 +02:00
Martin Berg Alstad
347314460a Sanitize impl in macro 2024-08-05 01:49:28 +02:00
Martin Berg Alstad
ee487540ac Sanitize returntype in macro 2024-08-05 01:46:35 +02:00
Martin Berg Alstad
865cc6ddb9 Fix version requirement for regex 2024-07-21 19:47:15 +02:00
Martin Berg Alstad
15de73dad4 Fix cfg for external crate 2024-07-21 19:43:58 +02:00
Martin Berg Alstad
8cbb2757a5 Fixed version requirement 2024-07-17 13:02:09 +02:00
Martin Berg Alstad
3389b2264e Removed trim from inner parenthesized 2024-07-17 12:48:09 +02:00
Martin Berg Alstad
5cd1c075a5 Read files macro for loading reading files to string at compile-time
Makefile for formatting and linting

Workspace for subcrates.

Moved crates to subdir and moved subcrate configs to workspace.*
2024-07-16 18:29:32 +02:00
Martin Berg Alstad
7a0cf00cbc Accept IpAddr or Ipv6Addr as socket.
Added port in builder for only specifying port
2024-07-07 15:13:13 +02:00
Martin Berg Alstad
971556af64 Merge remote-tracking branch 'origin/master' 2024-07-06 13:27:38 +02:00
Martin Berg Alstad
f40c87aa8e Changed router visibility to pub 2024-07-06 13:27:24 +02:00
Martin Berg Alstad
b685d81e00 comment 2024-07-03 11:24:28 +02:00
60 changed files with 3392 additions and 406 deletions

View File

@ -15,4 +15,4 @@ jobs:
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose --all-features
run: cargo test --verbose --all-features --workspace

13
.idea/lib.iml generated
View File

@ -2,13 +2,22 @@
<module type="EMPTY_MODULE" version="4">
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$">
<sourceFolder url="file://$MODULE_DIR$/derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/examples/multipart_file/src" isTestSource="false" />
<excludeFolder url="file://$MODULE_DIR$/derive/target" />
<sourceFolder url="file://$MODULE_DIR$/crates/into_response_derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/crates/read_files/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/crates/diesel_crud_derive/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/diesel_crud_trait/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/tests/src" isTestSource="false" />
<sourceFolder url="file://$MODULE_DIR$/crates/tests/tests" isTestSource="true" />
<sourceFolder url="file://$MODULE_DIR$/tests" isTestSource="true" />
<excludeFolder url="file://$MODULE_DIR$/target" />
<excludeFolder url="file://$MODULE_DIR$/examples/multipart_file/target" />
<excludeFolder url="file://$MODULE_DIR$/crates/into_response_derive/target" />
<excludeFolder url="file://$MODULE_DIR$/crates/read_files/target" />
</content>
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />

20
.idea/runConfigurations/All_Tests.xml generated Normal file
View File

@ -0,0 +1,20 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="All Tests" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="buildProfile" value="Test" />
<option name="command" value="test --workspace" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs />
<option name="emulateTerminal" value="true" />
<option name="channel" value="DEFAULT" />
<option name="requiredFeatures" value="true" />
<option name="allFeatures" value="true" />
<option name="withSudo" value="false" />
<option name="buildTarget" value="REMOTE" />
<option name="backtrace" value="SHORT" />
<option name="isRedirectInput" value="false" />
<option name="redirectInputPath" value="" />
<method v="2">
<option name="CARGO.BUILD_TASK_PROVIDER" enabled="true" />
</method>
</configuration>
</component>

View File

@ -1,12 +1,12 @@
<component name="ProjectRunConfigurationManager">
<configuration default="false" name="Test" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="test --package lib --lib tests --all-features" />
<configuration default="false" name="Release" type="CargoCommandRunConfiguration" factoryName="Cargo Command">
<option name="command" value="build --release --all-features" />
<option name="workingDirectory" value="file://$PROJECT_DIR$" />
<envs />
<option name="emulateTerminal" value="true" />
<option name="channel" value="DEFAULT" />
<option name="requiredFeatures" value="true" />
<option name="allFeatures" value="false" />
<option name="allFeatures" value="true" />
<option name="withSudo" value="false" />
<option name="buildTarget" value="REMOTE" />
<option name="backtrace" value="SHORT" />

6
.idea/rust.xml generated Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RsVcsConfiguration">
<option name="rustFmt" value="true" />
</component>
</project>

1832
Cargo.lock generated

File diff suppressed because it is too large

View File

@ -1,36 +1,80 @@
[package]
name = "lib"
version = "1.3.3"
[workspace]
members = ["crates/*"]
exclude = ["examples"]
[workspace.package]
edition = "2021"
rust-version = "1.80.1"
authors = ["Martin Berg Alstad"]
homepage = "emberal.github.io"
[package]
name = "lib"
version = "1.4.3"
description = "A library with utilities and helper fuctions."
edition = { workspace = true }
rust-version = { workspace = true }
authors = { workspace = true }
homepage = { workspace = true }
[lib]
[dependencies]
# Api
axum = { version = "0.7.5", optional = true, features = ["multipart"] }
tower = { version = "0.4.13", optional = true }
tower-http = { version = "0.5.2", optional = true, features = ["trace", "cors", "normalize-path"] }
axum = { version = "0.7", optional = true, features = ["multipart"] }
tower = { version = "0.5", optional = true }
tower-http = { version = "0.5", optional = true, features = ["trace", "cors", "normalize-path"] }
mime = { version = "0.3", optional = true }
# Async
tokio = { version = "1.38.0", optional = true, features = ["fs"] }
tokio-util = { version = "0.7.11", optional = true, features = ["io"] }
tokio = { workspace = true, optional = true, features = ["fs", "rt-multi-thread"] }
tokio-util = { version = "0.7", optional = true, features = ["io"] }
# Database
diesel = { workspace = true, optional = true, features = ["postgres"] }
diesel-async = { workspace = true, optional = true, features = ["postgres", "deadpool"] }
diesel-crud-derive = { path = "crates/diesel_crud_derive", optional = true }
diesel-crud-trait = { path = "crates/diesel_crud_trait", optional = true }
deadpool-diesel = { workspace = true, optional = true, features = ["postgres"] }
# Error handling
thiserror = { version = "1.0.61", optional = true }
thiserror = { workspace = true, optional = true }
# Logging
tracing = { version = "0.1.40", optional = true }
tracing-subscriber = { version = "0.3.18", optional = true }
tracing = { version = "0.1", optional = true }
tracing-subscriber = { version = "0.3", optional = true }
# Parsing
nom = { version = "7.1.3", optional = true }
nom = { version = "7.1", optional = true }
# Procedural macros
into-response-derive = { path = "crates/into_response_derive", optional = true }
read-files = { path = "crates/read_files", optional = true }
# Serialization / Deserialization
serde = { version = "1.0.203", optional = true, features = ["derive"] }
# Derive macros
derive = { path = "derive", optional = true }
serde = { version = "1.0", optional = true, features = ["derive"] }
# Time
chrono = { version = "0.4", optional = true, features = ["serde"] }
# Utils
derive_more = { workspace = true, features = ["from", "constructor"] }
[workspace.dependencies]
# Async
tokio = "1.40"
# Database
diesel = "2.2"
diesel-async = "0.5"
deadpool-diesel = "0.6"
# Error handling
thiserror = "1.0"
# Procedural macros
syn = "2.0"
quote = "1.0"
deluxe = "0.5"
proc-macro2 = "1.0"
# Utils
derive_more = "1.0"
[features]
axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio"]
axum = ["dep:axum", "dep:tower", "dep:tower-http", "dep:thiserror", "dep:tracing", "dep:tracing-subscriber", "dep:tokio", "dep:mime"]
diesel = ["dep:diesel-crud-trait", "dep:diesel", "dep:diesel-async", "dep:deadpool-diesel"]
io = ["dep:tokio", "dep:tokio-util"]
iter = []
nom = ["dep:nom"]
serde = ["dep:serde"]
derive = ["dep:derive", "axum", "serde"]
derive = ["dep:into-response-derive", "dep:diesel-crud-derive"]
read-files = ["dep:read-files"]
time = ["dep:chrono"]

15
Makefile.toml Normal file
View File

@ -0,0 +1,15 @@
[tasks.clippy]
command = "cargo"
args = ["clippy", "--all-targets", "--all-features", "--", "-D", "warnings"]
[tasks.fmt]
command = "cargo"
args = ["fmt", "--all"]
[tasks.test]
command = "cargo"
args = ["test", "--all-features"]
[tasks.coverage]
command = "cargo"
args = ["llvm-cov", "--all-features"]

3
README.md Normal file
View File

@ -0,0 +1,3 @@
# Lib
-_-

View File

@ -0,0 +1,14 @@
[package]
name = "diesel-crud-derive"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }
deluxe = { workspace = true }
proc-macro2 = { workspace = true }

View File

@ -0,0 +1,73 @@
use crate::common::PrimaryKey;
use deluxe::{extract_attributes, ExtractAttributes};
use proc_macro2::Ident;
use quote::quote;
use std::collections::HashMap;
use syn::spanned::Spanned;
use syn::{Data, DeriveInput, Expr, Path, Type};
#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel))]
pub(crate) struct DieselStructAttributes {
table_name: Option<Expr>,
#[deluxe(rest)]
_rest: HashMap<Path, Expr>,
}
#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel_crud))]
pub(crate) struct StructAttributes {
table: Option<Expr>,
#[deluxe(default)]
insert: Option<Type>,
#[deluxe(default)]
update: Option<Type>,
}
#[derive(ExtractAttributes)]
#[deluxe(attributes(diesel_crud))]
pub(crate) struct FieldAttributes(#[allow(unused)] Expr);
pub(crate) struct Attributes {
pub struct_ident: Ident,
pub table: Expr,
pub insert: Type,
pub update: Type,
pub pk: Option<PrimaryKey>,
}
pub(crate) fn extract_attrs(ast: &mut DeriveInput) -> deluxe::Result<Attributes> {
let struct_attributes: StructAttributes = extract_attributes(ast)?;
let diesel_attributes: DieselStructAttributes = extract_attributes(ast)?;
Ok(Attributes {
struct_ident: ast.ident.clone(),
table: diesel_attributes.table_name.unwrap_or_else(|| {
struct_attributes
.table
.expect("Table name should be provided on either diesel or diesel_crud attribute")
}),
insert: struct_attributes
.insert
.unwrap_or_else(|| Type::Verbatim(quote! { Self })),
update: struct_attributes
.update
.unwrap_or_else(|| Type::Verbatim(quote! { Self })),
pk: extract_field_attrs(ast).ok(),
})
}
fn extract_field_attrs(ast: &mut DeriveInput) -> deluxe::Result<PrimaryKey> {
if let Data::Struct(data_struct) = &mut ast.data {
for field in data_struct.fields.iter_mut() {
if let Ok(FieldAttributes(_)) = extract_attributes(field) {
return Ok(PrimaryKey {
ident: field.ident.clone().unwrap(),
ty: field.ty.clone(),
});
}
}
} else {
return Err(deluxe::Error::new(ast.span(), "Expected a struct"));
};
Err(deluxe::Error::new(ast.span(), "Primary key not found"))
}
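For orientation, a minimal sketch of the attribute shapes `extract_attrs` expects; the `Account`/`InsertAccount` names and schema path are illustrative, and the table may come from either the `diesel` or the `diesel_crud` attribute:
// Hypothetical struct annotated for the derives in this crate.
#[derive(DieselCrudCreate, DieselCrudRead)]
#[diesel_crud(insert = InsertAccount)]          // optional; defaults to Self
#[diesel(table_name = crate::schema::account)]  // or #[diesel_crud(table = ...)]
struct Account {
    #[diesel_crud(pk)]  // exactly one field marked as the primary key
    id: i32,
    name: String,
}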

View File

@ -0,0 +1,14 @@
use proc_macro2::Ident;
use quote::quote;
use syn::Type;
pub(crate) struct PrimaryKey {
pub ident: Ident,
pub ty: Type,
}
pub(crate) fn return_type(output: proc_macro2::TokenStream) -> proc_macro2::TokenStream {
quote! {
std::pin::Pin<Box<dyn core::future::Future<Output = Result<#output, lib::diesel_crud_trait::CrudError>> + Send + 'async_trait>>
}
}

View File

@ -0,0 +1,53 @@
use crate::{common, Attributes};
use quote::quote;
pub(crate) fn derive_diesel_crud_create_impl(
Attributes {
struct_ident,
table,
insert,
..
}: &Attributes,
) -> proc_macro2::TokenStream {
let return_type = common::return_type(quote! { Self });
let many_return_type = common::return_type(quote! { Vec<Self> });
quote! {
#[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudCreate<#table::table> for #struct_ident {
type Insert = #insert;
fn insert<'a, 'async_trait>(insert: Self::Insert, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result(
diesel::dsl::insert_into(#table::table::table()).values(insert),
conn
)
.await
.map_err(Into::into)
})
}
fn insert_many<'a, 'b, 'async_trait>(insert: &'a [Self::Insert], conn: &'b mut diesel_async::AsyncPgConnection) -> #many_return_type
where
Self: Sized + Sync + 'async_trait,
'a: 'async_trait,
'b: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_results(
diesel::dsl::insert_into(#table::table::table()).values(insert),
conn
)
.await
.map_err(Into::into)
})
}
}
}
}

View File

@ -0,0 +1,46 @@
use crate::{common, Attributes, PrimaryKey};
use quote::quote;
pub(crate) fn derive_diesel_crud_delete_impl(
Attributes {
struct_ident,
table,
pk,
..
}: &Attributes,
) -> proc_macro2::TokenStream {
if pk.is_none() {
panic!("Please specify a primary key using #[diesel_crud(pk)]");
}
let PrimaryKey {
ident: pk_ident,
ty: pk_type,
} = pk.as_ref().unwrap();
let return_type = common::return_type(quote! { Self });
quote! {
#[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudDelete for #struct_ident {
type PK = #pk_type;
fn delete<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::QueryDsl;
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result(
diesel::delete(
#table::table
.filter(diesel::expression_methods::ExpressionMethods::eq(#table::#pk_ident, pk))
),
conn,
)
.await
.map_err(Into::into)
})
}
}
}
}

View File

@ -0,0 +1,165 @@
extern crate proc_macro;
use crate::attributes::{extract_attrs, Attributes};
use crate::common::PrimaryKey;
use crate::create::derive_diesel_crud_create_impl;
use crate::delete::derive_diesel_crud_delete_impl;
use crate::list::derive_diesel_crud_list_impl;
use crate::read::derive_diesel_crud_read_impl;
use crate::update::derive_diesel_crud_update_impl;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
mod attributes;
mod common;
mod create;
mod delete;
mod list;
mod read;
mod update;
/// Derives 5 functions for CRUD operations
/// 1. create
/// 2. read
/// 3. update
/// 4. delete
/// 5. list
#[proc_macro_derive(DieselCrud, attributes(diesel_crud))]
pub fn derive_diesel_crud(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
let create = derive_diesel_crud_create_impl(&attrs);
let read = derive_diesel_crud_read_impl(&attrs);
let update = derive_diesel_crud_update_impl(&attrs);
let delete = derive_diesel_crud_delete_impl(&attrs);
let list = derive_diesel_crud_list_impl(&attrs);
let Attributes {
table,
struct_ident,
..
} = attrs;
let expanded = quote! {
#create
#read
#update
#delete
#list
impl lib::diesel_crud_trait::DieselCrud<#table::table> for #struct_ident {}
};
expanded.into()
}
/// Derives the create function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// - insert: Type - The insertable model (Optional, defaults to `Self`)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudCreate)]
/// #[diesel_crud(create = crate::models::InsertUser)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
/// #[diesel_crud(pk)]
/// email: String,
/// password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudCreate, attributes(diesel_crud))]
pub fn derive_diesel_crud_create(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = syn::parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
derive_diesel_crud_create_impl(&attrs).into()
}
/// Derives the read function for CRUD operations.
/// Must be used on a struct with one field marked as the primary key.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Field Attributes
/// - pk: Ident - The primary key field (Only one field should be marked as the primary key)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudRead)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
/// #[diesel_crud(pk)]
/// email: String,
/// password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudRead, attributes(diesel_crud))]
pub fn derive_diesel_crud_read(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = syn::parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
derive_diesel_crud_read_impl(&attrs).into()
}
/// Derives the update function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// - update: Type - The update model (Optional, defaults to `Self`)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudUpdate)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
/// #[diesel_crud(pk)]
/// email: String,
/// password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudUpdate, attributes(diesel_crud))]
pub fn derive_diesel_crud_update(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = syn::parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
derive_diesel_crud_update_impl(&attrs).into()
}
/// Derives the delete function for CRUD operations.
/// Must be used on a struct with a field marked as primary key.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Field Attributes
/// - pk: Ident - The primary key field (Only one field should be marked as the primary key)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudDelete)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
/// #[diesel_crud(pk)]
/// email: String,
/// password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudDelete, attributes(diesel_crud))]
pub fn derive_diesel_crud_delete(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = syn::parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
derive_diesel_crud_delete_impl(&attrs).into()
}
/// Derives the list function for CRUD operations.
/// Must be used on a struct.
/// # Struct Attributes
/// - table: Expr - The schema struct for the table (can be provided on either diesel or diesel_crud attribute)
/// # Example
/// ```ignore
/// #[derive(Queryable, diesel_crud_derive::DieselCrudList)]
/// #[diesel(table_name = crate::schema::user)]
/// struct User {
/// #[diesel_crud(pk)]
/// email: String,
/// password: String,
/// }
/// ```
#[proc_macro_derive(DieselCrudList, attributes(diesel_crud))]
pub fn derive_diesel_crud_list(item: proc_macro::TokenStream) -> proc_macro::TokenStream {
let mut item = syn::parse_macro_input!(item as DeriveInput);
let attrs = extract_attrs(&mut item).unwrap();
derive_diesel_crud_list_impl(&attrs).into()
}
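Putting the pieces together, a hedged sketch of deriving the full `DieselCrud` bundle (mirroring the per-derive doc examples above; `User`, `InsertUser` and the schema path are illustrative):
#[derive(Queryable, Selectable, Insertable, AsChangeset, DieselCrud)]
#[diesel_crud(insert = InsertUser)]
#[diesel(table_name = crate::schema::user)]
struct User {
    #[diesel_crud(pk)]
    email: String,
    password: String,
}
// Separate insertable model referenced by the attribute above.
#[derive(Insertable)]
#[diesel(table_name = crate::schema::user)]
struct InsertUser {
    email: String,
    password: String,
}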

View File

@ -0,0 +1,28 @@
use crate::{common, Attributes};
use quote::quote;
pub(crate) fn derive_diesel_crud_list_impl(
Attributes {
struct_ident,
table,
..
}: &Attributes,
) -> proc_macro2::TokenStream {
let return_type = common::return_type(quote! { Vec<Self> });
quote! {
#[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudList for #struct_ident {
fn list<'a, 'async_trait>(conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait
{
Box::pin(async move {
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_results(#table::table::table(), conn).await.map_err(Into::into)
})
}
}
}
}

View File

@ -0,0 +1,40 @@
use crate::common::PrimaryKey;
use crate::{common, Attributes};
use quote::quote;
pub(crate) fn derive_diesel_crud_read_impl(
Attributes {
struct_ident,
table,
pk,
..
}: &Attributes,
) -> proc_macro2::TokenStream {
if pk.is_none() {
panic!("Please specify a primary key using #[diesel_crud(pk)]");
}
let PrimaryKey { ty: pk_type, .. } = pk.as_ref().unwrap();
let return_type = common::return_type(quote! { Self });
quote! {
#[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudRead for #struct_ident {
type PK = #pk_type;
fn read<'a, 'async_trait>(pk: Self::PK, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait
{
Box::pin(async move {
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result(
diesel::QueryDsl::find(#table::table::table(), pk),
conn
)
.await
.map_err(Into::into)
})
}
}
}
}

View File

@ -0,0 +1,35 @@
use crate::{common, Attributes};
use quote::quote;
pub(crate) fn derive_diesel_crud_update_impl(
Attributes {
struct_ident,
table,
update,
..
}: &Attributes,
) -> proc_macro2::TokenStream {
let return_type = common::return_type(quote! { Self });
quote! {
#[automatically_derived]
impl lib::diesel_crud_trait::DieselCrudUpdate for #struct_ident {
type Update = #update;
fn update<'a, 'async_trait>(update: Self::Update, conn: &'a mut diesel_async::AsyncPgConnection) -> #return_type
where
Self: Sized + Sync + 'a,
'a: 'async_trait,
{
Box::pin(async move {
use diesel::associations::HasTable;
diesel_async::RunQueryDsl::get_result(
diesel::dsl::update(#table::table::table()).set(update),
conn,
)
.await
.map_err(Into::into)
})
}
}
}
}

View File

@ -0,0 +1,12 @@
[package]
name = "diesel-crud-trait"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[dependencies]
diesel = { workspace = true, features = ["postgres"] }
diesel-async = { workspace = true, features = ["postgres", "deadpool"] }
async-trait = "0.1"
deadpool-diesel = { version = "0.6", features = ["postgres"] }
thiserror = "1.0"

View File

@ -0,0 +1,22 @@
use diesel::result::Error;
use thiserror::Error;
/// Error type for CRUD operations
#[derive(Debug, PartialEq, Error)]
pub enum CrudError {
#[error("Resource not found")]
NotFound,
#[error("Database pool error: {0}")]
PoolError(String),
#[error(transparent)]
Other(Error),
}
impl From<Error> for CrudError {
fn from(error: Error) -> Self {
match error {
Error::NotFound => CrudError::NotFound,
_ => CrudError::Other(error),
}
}
}

View File

@ -0,0 +1,118 @@
mod error;
use async_trait::async_trait;
use diesel::{AsChangeset, Insertable};
use diesel_async::AsyncPgConnection;
pub use error::CrudError;
/// Combines all CRUD operations into a single trait
/// Includes:
/// - Create
/// - Read
/// - Update
/// - Delete
/// - List
pub trait DieselCrud<Table>:
DieselCrudCreate<Table> + DieselCrudRead + DieselCrudUpdate + DieselCrudDelete + DieselCrudList
{
}
/// Insert an entity into the database
/// The entity must implement `Insertable<Table>` for the given table.
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `Insert` - The type to insert, must implement `Insertable<Table>`
/// # Parameters
/// - `insert` - The entity to insert
/// - `conn` - The database connection
/// # Returns
/// A result containing the inserted entity or a `CrudError`
#[async_trait]
pub trait DieselCrudCreate<Table>
where
Self: Sized,
{
type Insert: Insertable<Table>;
async fn insert(insert: Self::Insert, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
async fn insert_many(
insert: &[Self::Insert],
conn: &mut AsyncPgConnection,
) -> Result<Vec<Self>, CrudError>;
}
/// Gets an entity from the database
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `PK` - The primary key of the entity
/// # Parameters
/// - `pk` - The primary key of the entity
/// - `conn` - The database connection
/// # Returns
/// A result containing the entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudRead
where
Self: Sized,
{
type PK;
async fn read(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
}
/// Updates an entity in the database
/// The entity must implement `AsChangeset` for the given table.
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `Update` - The type to update
/// # Parameters
/// - `update` - The update to apply
/// - `conn` - The database connection
/// # Returns
/// A result containing the old entry of the entity if successful or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudUpdate
where
Self: Sized,
{
type Update: AsChangeset;
async fn update(update: Self::Update, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
}
/// Deletes an entity from the database
///
/// Implementing the trait requires the `async_trait` macro.
/// # Associations
/// - `PK` - The primary key of the entity
/// # Parameters
/// - `pk` - The primary key of the entity
/// - `conn` - The database connection
/// # Returns
/// A result containing the deleted entity or a `CrudError`.
/// If the entity is not found, the error should be `CrudError::NotFound`.
#[async_trait]
pub trait DieselCrudDelete
where
Self: Sized,
{
type PK;
async fn delete(pk: Self::PK, conn: &mut AsyncPgConnection) -> Result<Self, CrudError>;
}
/// Lists all entities in the table
///
/// Implementing the trait requires the `async_trait` macro.
/// # Parameters
/// - `conn` - The database connection
/// # Returns
/// A result containing a Vec of entities or a `CrudError`.
#[async_trait]
pub trait DieselCrudList
where
Self: Sized,
{
async fn list(conn: &mut AsyncPgConnection) -> Result<Vec<Self>, CrudError>;
}
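As a rough usage sketch (assuming a `User`/`InsertUser` pair that implements these traits, for example via the diesel-crud-derive crate, plus `PartialEq` on `User` and an open connection):
async fn user_roundtrip(conn: &mut AsyncPgConnection) -> Result<(), CrudError> {
    // Insert returns the stored row, read fetches it back by primary key.
    let user = User::insert(InsertUser { email: "user@example.com".into() }, conn).await?;
    let fetched = User::read(user.email.clone(), conn).await?;
    // List returns every row in the table; delete returns the removed row.
    let all = User::list(conn).await?;
    assert!(all.contains(&fetched));
    User::delete(fetched.email, conn).await?;
    Ok(())
}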

View File

@ -0,0 +1,12 @@
[package]
name = "into-response-derive"
version = "1.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }

View File

@ -0,0 +1,21 @@
use proc_macro::TokenStream;
use quote::quote;
use syn::DeriveInput;
// TODO derive generic types
pub fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
let name = &input.ident;
let expanded = quote! {
impl axum::response::IntoResponse for #name {
fn into_response(self) -> axum::response::Response {
let version = env!("CARGO_PKG_VERSION");
lib::serde::response::BaseResponse::new(version, self)
.into_response()
}
}
};
TokenStream::from(expanded)
}

View File

@ -0,0 +1,13 @@
extern crate proc_macro;
use {
proc_macro::TokenStream,
syn::{parse_macro_input, DeriveInput},
};
mod derive;
#[proc_macro_derive(IntoResponse)]
pub fn into_response_derive(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
derive::into_response_derive_impl(input)
}
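A hedged sketch of the intended use; `Pong` is illustrative and the derive is assumed to be re-exported from `lib` the same way the diesel derives are. The generated impl wraps the value in `lib::serde::response::BaseResponse` tagged with the crate version:
#[derive(serde::Serialize, lib::into_response_derive::IntoResponse)]
struct Pong {
    message: String,
}
// Can now be returned straight from an axum handler.
async fn ping() -> Pong {
    Pong { message: "pong".into() }
}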

View File

@ -0,0 +1,13 @@
[package]
name = "read-files"
version = "0.1.0"
edition = { workspace = true }
rust-version = { workspace = true }
[lib]
proc-macro = true
[dependencies]
syn = { workspace = true }
quote = { workspace = true }
regex = "1.10"

View File

@ -0,0 +1,34 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use syn::parse_macro_input;
use crate::read_files::read_files_to_string_impl;
mod read_files;
/// Read files from a directory into a HashMap.
/// The key is the file path relative to the root directory.
/// The value is the file contents as a string.
/// # Arguments
/// * `path` - The directory to search for files, relative to the root directory.
/// * `pattern` - The regex pattern to match files against. If missing, all files are matched.
/// # Returns
/// A HashMap containing the file paths and contents.
/// # Example
/// ```
/// use read_files::read_files_to_string;
///
/// let files = read_files_to_string!("./src", ".rs$");
/// assert!(!files.is_empty());
/// ```
/// # Panics
/// If the path is empty. \
/// If the pattern is invalid. \
/// If the path does not exist. \
/// If there are unexpected tokens. \
#[proc_macro]
pub fn read_files_to_string(input: TokenStream) -> TokenStream {
let args = parse_macro_input!(input as read_files::Args);
read_files_to_string_impl(args)
}

View File

@ -0,0 +1,124 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use std::{
collections::HashMap,
fs::{metadata, read_dir, read_to_string},
io,
path::{Path, PathBuf},
};
use quote::quote;
use syn::{
parse::{Parse, ParseStream},
LitStr, Token,
};
pub fn read_files_to_string_impl(args: Args) -> TokenStream {
let (keys, values) = split_hashmap(args);
let expanded = quote! {
{
let keys = vec![#( #keys, )*];
let values = vec![#( #values, )*];
keys.into_iter()
.zip(values.into_iter())
.collect::<std::collections::HashMap<&'static str, &'static str>>()
}
};
expanded.into()
}
pub struct Args {
pub path: String,
pub pattern: String,
}
struct Syntax {
path: LitStr,
/* Comma */
pattern: Option<LitStr>,
}
impl From<Syntax> for Args {
fn from(syntax: Syntax) -> Self {
Self {
path: syntax.path.value(),
pattern: syntax
.pattern
.map(|pattern| pattern.value())
.unwrap_or_default(),
}
}
}
impl Parse for Args {
fn parse(stream: ParseStream) -> syn::Result<Self> {
if stream.is_empty() {
panic!("Expected path argument");
}
let path: LitStr = stream.parse()?;
if path.value().is_empty() {
panic!("Path must not be empty");
}
let pattern = if stream.peek(Token![,]) {
stream.parse::<Token![,]>()?;
Some(stream.parse()?)
} else {
None
};
let syntax = Syntax { path, pattern };
if !stream.is_empty() {
panic!("Expected end of input");
}
Ok(syntax.into())
}
}
pub fn split_hashmap(args: Args) -> (Vec<String>, Vec<String>) {
read_files_to_string(Path::new(&args.path), &args.pattern)
.unwrap()
.into_iter()
.map(|(key, value)| (key.to_string_lossy().to_string(), value))
.collect()
}
/// Find files within a directory and load them into a HashMap.
/// The key is the file path relative to the root directory.
/// The value is the file contents as a string.
/// # Arguments
/// * `path` - The directory to search for files.
/// * `pattern` - The pattern to match files against.
/// # Returns
/// A HashMap containing the file paths and contents.
pub fn read_files_to_string(
path: &Path,
pattern: &str,
) -> Result<HashMap<PathBuf, String>, io::Error> {
use regex::Regex;
let mut files: HashMap<PathBuf, String> = HashMap::new();
let dir = read_dir(path)?;
for entry in dir {
let entry = entry?;
let path = entry.path();
let file_name = entry.file_name();
let file_name = file_name.to_string_lossy();
let metadata = metadata(&path)?;
let regex =
Regex::new(pattern).map_err(|e| io::Error::new(io::ErrorKind::InvalidInput, e))?;
if metadata.is_file() && regex.is_match(file_name.as_ref()) {
let file = read_to_string(&path)?;
files.insert(path, file);
} else if metadata.is_dir() {
files.extend(read_files_to_string(&path, pattern)?);
}
}
Ok(files)
}

View File

@ -0,0 +1,13 @@
use read_files::read_files_to_string;
#[test]
fn test_load_files() {
let files = read_files_to_string!("./src", ".rs$");
assert!(!files.is_empty());
}
#[test]
fn test_load_all_files() {
let files = read_files_to_string!("./src");
assert!(!files.is_empty());
}

18
crates/tests/Cargo.toml Normal file
View File

@ -0,0 +1,18 @@
[package]
name = "tests"
version = "0.1.0"
edition.workspace = true
rust-version.workspace = true
authors.workspace = true
homepage.workspace = true
[dependencies]
diesel = { workspace = true }
diesel-async = { workspace = true }
lib = { path = "../../../rust-lib", features = ["diesel", "derive"] }
derive_more = { workspace = true, features = ["constructor", "from"] }
[dev-dependencies]
tokio = { workspace = true, features = ["macros"] }
testcontainers-modules = { version = "0.9", features = ["postgres"] }
diesel_async_migrations = "0.14"

9
crates/tests/diesel.toml Normal file
View File

@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "/home/martin/git/rust/lib/crates/tests/migrations"

View File

View File

@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@ -0,0 +1 @@
DROP TABLE IF EXISTS "user" CASCADE;

View File

@ -0,0 +1,4 @@
CREATE TABLE "user"
(
email VARCHAR(255) PRIMARY KEY
);

1
crates/tests/src/lib.rs Normal file
View File

@ -0,0 +1 @@
// This file is needed to make the tests directory a crate

View File

@ -0,0 +1,61 @@
use diesel::{AsChangeset, Insertable, Queryable, Selectable};
use lib::diesel_crud_derive::{
DieselCrudCreate, DieselCrudDelete, DieselCrudList, DieselCrudRead, DieselCrudUpdate,
};
use lib::diesel_crud_trait::DieselCrudCreate;
use test_containers::create_test_containers_pool;
#[cfg(test)]
pub mod test_containers;
diesel::table! {
user (email) {
#[max_length = 255]
email -> Varchar,
}
}
#[derive(
Debug,
PartialEq,
Queryable,
Selectable,
Insertable,
AsChangeset,
DieselCrudCreate,
DieselCrudDelete,
DieselCrudList,
DieselCrudRead,
DieselCrudUpdate,
)]
#[diesel_crud(insert = InsertUser)]
#[diesel(table_name = user)]
struct User {
#[diesel_crud(pk)]
email: String,
}
#[derive(Clone, Insertable)]
#[diesel(table_name = user)]
struct InsertUser {
email: String,
}
#[tokio::test]
async fn test_insert_user() {
let container = create_test_containers_pool().await.unwrap();
let mut conn = container.pool.get().await.unwrap();
let user = User::insert(
InsertUser {
email: "test".to_string(),
},
&mut conn,
)
.await;
assert_eq!(
user,
Ok(User {
email: "test".to_string()
})
);
}

View File

@ -0,0 +1,50 @@
use derive_more::{Constructor, From};
use diesel_async::pooled_connection::deadpool::{BuildError, PoolError};
use diesel_async::AsyncPgConnection;
use diesel_async_migrations::EmbeddedMigrations;
use lib::diesel::pool::{create_pool_from_url, PgPool};
use lib::diesel::DieselError;
use testcontainers_modules::postgres::Postgres;
use testcontainers_modules::testcontainers::runners::AsyncRunner;
use testcontainers_modules::testcontainers::{ContainerAsync, TestcontainersError};
/// When the TestContainer is dropped, the container will be removed.
/// # Errors
/// If destructured and the container field is dropped, the container will be stopped
/// and all connections from the pool will result in DatabaseError.
#[derive(Constructor)]
pub struct TestContainer {
pub container: ContainerAsync<Postgres>,
pub pool: PgPool,
}
#[derive(Debug, From)]
pub enum ContainerError {
TestContainers(TestcontainersError),
BuildError(BuildError),
PoolError(PoolError),
DieselError(DieselError),
}
pub async fn create_test_containers_pool<'a>() -> Result<TestContainer, ContainerError> {
let container = create_postgres_container().await?;
let connection_string = format!(
"postgres://postgres:postgres@localhost:{}/postgres",
container.get_host_port_ipv4(5432).await?
);
let pool = create_pool_from_url(connection_string)?;
run_migrations(pool.get().await?.as_mut()).await?;
Ok(TestContainer::new(container, pool))
}
pub(crate) async fn run_migrations(
conn: &mut AsyncPgConnection,
) -> Result<(), diesel::result::Error> {
static EMBEDDED_MIGRATIONS: EmbeddedMigrations =
diesel_async_migrations::embed_migrations!("./migrations");
EMBEDDED_MIGRATIONS.run_pending_migrations(conn).await
}
pub async fn create_postgres_container() -> Result<ContainerAsync<Postgres>, TestcontainersError> {
Postgres::default().start().await
}

46
derive/Cargo.lock generated
View File

@ -1,46 +0,0 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
[[package]]
name = "derive"
version = "1.0.0"
dependencies = [
"quote",
"syn",
]
[[package]]
name = "proc-macro2"
version = "1.0.85"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22244ce15aa966053a896d1accb3a6e68469b97c7f33f284b99f0d576879fc23"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.36"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7"
dependencies = [
"proc-macro2",
]
[[package]]
name = "syn"
version = "2.0.67"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff8655ed1d86f3af4ee3fd3263786bc14245ad17c4c7e85ba7187fb3ae028c90"
dependencies = [
"proc-macro2",
"quote",
"unicode-ident",
]
[[package]]
name = "unicode-ident"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b"

View File

@ -1,12 +0,0 @@
[package]
name = "derive"
version = "1.0.0"
edition = "2021"
authors = ["Martin Berg Alstad"]
[lib]
proc-macro = true
[dependencies]
syn = "2.0.66"
quote = "1.0.36"

View File

@ -1,27 +0,0 @@
extern crate proc_macro;
use proc_macro::TokenStream;
use quote::quote;
use syn::{parse_macro_input, DeriveInput};
#[proc_macro_derive(IntoResponse)]
pub fn into_response_derive(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
into_response_derive_impl(input)
}
fn into_response_derive_impl(input: DeriveInput) -> TokenStream {
let name = &input.ident;
let expanded = quote! {
impl IntoResponse for #name {
fn into_response(self) -> Response {
let version = env!("CARGO_PKG_VERSION");
lib::serde::response::BaseResponse::new(version, self)
.into_response()
}
}
};
TokenStream::from(expanded)
}

View File

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "addr2line"
@ -57,7 +57,7 @@ dependencies = [
"serde_urlencoded",
"sync_wrapper 1.0.1",
"tokio",
"tower",
"tower 0.4.13",
"tower-layer",
"tower-service",
"tracing",
@ -123,6 +123,26 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
[[package]]
name = "derive_more"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4a9b99b9cbbe49445b21764dc0625032a89b145a2642e67603e1c936f5458d05"
dependencies = [
"derive_more-impl",
]
[[package]]
name = "derive_more-impl"
version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cb7330aeadfbe296029522e6c40f315320aba36fc43a5b3632f3795348f3bd22"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "encoding_rs"
version = "0.8.34"
@ -286,12 +306,14 @@ checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
[[package]]
name = "lib"
version = "1.3.3"
version = "1.4.3"
dependencies = [
"axum",
"derive_more",
"mime",
"thiserror",
"tokio",
"tower",
"tower 0.5.0",
"tower-http",
"tracing",
"tracing-subscriber",
@ -338,13 +360,14 @@ dependencies = [
[[package]]
name = "mio"
version = "0.8.11"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c"
checksum = "80e04d1dcff3aae0704555fe5fee3bcfaf3d1fdf8a7e521d5b9d2b42acb52cec"
dependencies = [
"hermit-abi",
"libc",
"wasi",
"windows-sys 0.48.0",
"windows-sys",
]
[[package]]
@ -383,16 +406,6 @@ dependencies = [
"winapi",
]
[[package]]
name = "num_cpus"
version = "1.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43"
dependencies = [
"hermit-abi",
"libc",
]
[[package]]
name = "object"
version = "0.36.1"
@ -563,7 +576,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c"
dependencies = [
"libc",
"windows-sys 0.52.0",
"windows-sys",
]
[[package]]
@ -627,25 +640,24 @@ dependencies = [
[[package]]
name = "tokio"
version = "1.38.0"
version = "1.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba4f4a02a7a80d6f274636f0aa95c7e383b912d41fe721a31f29e29698585a4a"
checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998"
dependencies = [
"backtrace",
"libc",
"mio",
"num_cpus",
"pin-project-lite",
"socket2",
"tokio-macros",
"windows-sys 0.48.0",
"windows-sys",
]
[[package]]
name = "tokio-macros"
version = "2.3.0"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
@ -668,6 +680,16 @@ dependencies = [
"tracing",
]
[[package]]
name = "tower"
version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "36b837f86b25d7c0d7988f00a54e74739be6477f2aac6201b8f429a7569991b7"
dependencies = [
"tower-layer",
"tower-service",
]
[[package]]
name = "tower-http"
version = "0.5.2"
@ -801,37 +823,13 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f"
[[package]]
name = "windows-sys"
version = "0.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9"
dependencies = [
"windows-targets 0.48.5",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.5",
]
[[package]]
name = "windows-targets"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c"
dependencies = [
"windows_aarch64_gnullvm 0.48.5",
"windows_aarch64_msvc 0.48.5",
"windows_i686_gnu 0.48.5",
"windows_i686_msvc 0.48.5",
"windows_x86_64_gnu 0.48.5",
"windows_x86_64_gnullvm 0.48.5",
"windows_x86_64_msvc 0.48.5",
"windows-targets",
]
[[package]]
@ -840,46 +838,28 @@ version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6f0713a46559409d202e70e28227288446bf7841d3211583a4b53e3f6d96e7eb"
dependencies = [
"windows_aarch64_gnullvm 0.52.5",
"windows_aarch64_msvc 0.52.5",
"windows_i686_gnu 0.52.5",
"windows_aarch64_gnullvm",
"windows_aarch64_msvc",
"windows_i686_gnu",
"windows_i686_gnullvm",
"windows_i686_msvc 0.52.5",
"windows_x86_64_gnu 0.52.5",
"windows_x86_64_gnullvm 0.52.5",
"windows_x86_64_msvc 0.52.5",
"windows_i686_msvc",
"windows_x86_64_gnu",
"windows_x86_64_gnullvm",
"windows_x86_64_msvc",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7088eed71e8b8dda258ecc8bac5fb1153c5cffaf2578fc8ff5d61e23578d3263"
[[package]]
name = "windows_aarch64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9985fd1504e250c615ca5f281c3f7a6da76213ebd5ccc9561496568a2752afb6"
[[package]]
name = "windows_i686_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e"
[[package]]
name = "windows_i686_gnu"
version = "0.52.5"
@ -892,48 +872,24 @@ version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "87f4261229030a858f36b459e748ae97545d6f1ec60e5e0d6a3d32e0dc232ee9"
[[package]]
name = "windows_i686_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406"
[[package]]
name = "windows_i686_msvc"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "db3c2bf3d13d5b658be73463284eaf12830ac9a26a90c717b7f771dfe97487bf"
[[package]]
name = "windows_x86_64_gnu"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e4246f76bdeff09eb48875a0fd3e2af6aada79d409d33011886d3e1581517d9"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "852298e482cd67c356ddd9570386e2862b5673c85bd5f88df9ab6802b334c596"
[[package]]
name = "windows_x86_64_msvc"
version = "0.48.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.5"

View File

@ -6,4 +6,4 @@ edition = "2021"
[dependencies]
lib = { path = "../..", features = ["axum"] }
axum = "0.7.5"
tokio = { version = "1.38.0", features = ["rt-multi-thread", "macros"] }
tokio = { version = "1.40", features = ["rt-multi-thread", "macros"] }

View File

@ -1,8 +1,15 @@
use {
axum::{extract::Request, handler::Handler, Router, ServiceExt},
std::{io, net::Ipv4Addr, net::SocketAddr},
axum::{
extract::Request, handler::Handler, response::IntoResponse, routing::Route, Router,
ServiceExt,
},
std::{
convert::Infallible,
io,
net::{IpAddr, Ipv4Addr, SocketAddr},
},
tokio::net::TcpListener,
tower::layer::Layer,
tower::{layer::Layer, Service},
tower_http::{
cors::CorsLayer,
normalize_path::NormalizePathLayer,
@ -26,32 +33,70 @@ macro_rules! create_app {
#[derive(Default)]
pub struct AppBuilder {
router: Router,
socket: Option<(Ipv4Addr, u16)>,
socket: Option<(IpAddr, u16)>,
cors: Option<CorsLayer>,
normalize_path: Option<bool>,
tracing: Option<TraceLayer<HttpMakeClassifier>>,
}
impl AppBuilder {
/// Creates a new app builder with default options.
pub fn new() -> Self {
Self::default()
}
/// Creates the builder from the given router.
/// Only the routes and layers will be used.
pub fn from_router(router: Router) -> Self {
Self {
router,
..Self::default()
}
}
/// Adds a route to the previously added routes
pub fn route(mut self, route: Router) -> Self {
self.router = self.router.merge(route);
self
}
/// Adds multiple routes to the previously added routes
pub fn routes(mut self, routes: impl IntoIterator<Item = Router>) -> Self {
self.router = routes.into_iter().fold(self.router, Router::merge);
self
}
pub fn socket(mut self, socket: impl Into<(Ipv4Addr, u16)>) -> Self {
self.socket = Some(socket.into());
/// Adds a layer to the previously added routes
pub fn layer<L>(mut self, layer: L) -> Self
where
L: Layer<Route> + Clone + Send + 'static,
L::Service: Service<Request> + Clone + Send + 'static,
<L::Service as Service<Request>>::Response: IntoResponse + 'static,
<L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
<L::Service as Service<Request>>::Future: Send + 'static,
{
self.router = self.router.layer(layer);
self
}
/// Sets the socket for the server.
pub fn socket<IP: Into<IpAddr>>(mut self, socket: impl Into<(IP, u16)>) -> Self {
let (ip, port) = socket.into();
self.socket = Some((ip.into(), port));
self
}
/// Sets the port for the server.
pub fn port(mut self, port: u16) -> Self {
self.socket = if let Some((ip, _)) = self.socket {
Some((ip, port))
} else {
Some((Ipv4Addr::UNSPECIFIED.into(), port))
};
self
}
/// Sets the fallback handler.
pub fn fallback<H, T>(mut self, fallback: H) -> Self
where
H: Handler<T, ()>,
@ -61,42 +106,28 @@ impl AppBuilder {
self
}
/// Sets the cors layer.
pub fn cors(mut self, cors: CorsLayer) -> Self {
self.cors = Some(cors);
self
}
/// Sets the normalize path option. Default is true.
pub fn normalize_path(mut self, normalize_path: bool) -> Self {
self.normalize_path = Some(normalize_path);
self
}
/// Sets the trace layer.
pub fn tracing(mut self, tracing: TraceLayer<HttpMakeClassifier>) -> Self {
self.tracing = Some(tracing);
self
}
pub async fn serve(self) -> io::Result<()> {
let _ = fmt_trace(); // Allowed to fail
let listener = self.listener().await?;
if self.normalize_path.unwrap_or(true) {
let app = NormalizePathLayer::trim_trailing_slash().layer(self.create_app());
axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?;
} else {
let app = self.create_app();
axum::serve(listener, app.into_make_service()).await?;
};
Ok(())
}
async fn listener(&self) -> io::Result<TcpListener> {
let addr = SocketAddr::from(self.socket.unwrap_or((Ipv4Addr::UNSPECIFIED, 8000)));
info!("Initializing server on: {addr}");
TcpListener::bind(&addr).await
}
fn create_app(self) -> Router {
/// Creates the app with the given options.
/// This method is useful for testing purposes.
/// Options used for configuring the listener will be lost.
pub fn build(self) -> Router {
let mut app = self.router;
if let Some(cors) = self.cors {
app = app.layer(cors);
@ -109,6 +140,33 @@ impl AppBuilder {
),
)
}
/// Build the app and start the server
/// # Default Options
/// - IP == 0.0.0.0
/// - Port == 8000
/// - Cors == None
/// - Normalize Path == true
/// - Tracing == Default compact
pub async fn serve(self) -> io::Result<()> {
let _ = fmt_trace(); // Allowed to fail
let listener = self.listener().await?;
if self.normalize_path.unwrap_or(true) {
let app = NormalizePathLayer::trim_trailing_slash().layer(self.build());
axum::serve(listener, ServiceExt::<Request>::into_make_service(app)).await?;
} else {
let app = self.build();
axum::serve(listener, app.into_make_service()).await?;
};
Ok(())
}
async fn listener(&self) -> io::Result<TcpListener> {
let addr = SocketAddr::from(self.socket.unwrap_or((Ipv4Addr::UNSPECIFIED.into(), 8000)));
info!("Initializing server on: {addr}");
TcpListener::bind(&addr).await
}
}
fn fmt_trace() -> Result<(), String> {
@ -121,43 +179,37 @@ fn fmt_trace() -> Result<(), String> {
#[cfg(test)]
mod tests {
use axum::Router;
use super::*;
use axum::Router;
use std::time::Duration;
use tokio::time::sleep;
mod tokio_tests {
use std::time::Duration;
#[tokio::test]
async fn test_app_builder_serve() {
let handler = tokio::spawn(async {
AppBuilder::new().serve().await.unwrap();
});
sleep(Duration::from_millis(250)).await;
handler.abort();
}
use tokio::time::sleep;
use super::*;
#[tokio::test]
async fn test_app_builder_serve() {
let handler = tokio::spawn(async {
AppBuilder::new().serve().await.unwrap();
});
sleep(Duration::from_secs(1)).await;
handler.abort();
}
#[tokio::test]
async fn test_app_builder_all() {
let handler = tokio::spawn(async {
AppBuilder::new()
.socket((Ipv4Addr::LOCALHOST, 8080))
.routes([Router::new()])
.fallback(|| async { "Fallback" })
.cors(CorsLayer::new())
.normalize_path(true)
.tracing(TraceLayer::new_for_http())
.serve()
.await
.unwrap();
});
sleep(Duration::from_secs(1)).await;
handler.abort();
}
#[tokio::test]
async fn test_app_builder_all() {
let handler = tokio::spawn(async {
AppBuilder::new()
.socket((Ipv4Addr::LOCALHOST, 8080))
.routes([Router::new()])
.fallback(|| async { "Fallback" })
.cors(CorsLayer::new())
.normalize_path(true)
.tracing(TraceLayer::new_for_http())
.layer(TraceLayer::new_for_http())
.serve()
.await
.unwrap();
});
sleep(Duration::from_millis(250)).await;
handler.abort();
}
#[test]

View File

@ -6,57 +6,24 @@ use axum::{
},
response::IntoResponse,
};
use mime::Mime;
use std::str::FromStr;
use thiserror::Error;
#[derive(PartialEq, Eq, Ord, PartialOrd, Hash, Debug, Clone, Copy)]
pub enum ContentType {
Json,
Form,
Multipart,
Pdf,
Html,
Unknown,
}
impl From<&str> for ContentType {
fn from(content_type: &str) -> Self {
match content_type {
"application/json" => ContentType::Json,
"application/x-www-form-urlencoded" => ContentType::Form,
"multipart/form-data" => ContentType::Multipart,
"application/pdf" => ContentType::Pdf,
"text/html" => ContentType::Html,
_ => ContentType::Unknown,
}
}
}
impl From<String> for ContentType {
fn from(content_type: String) -> Self {
ContentType::from(content_type.as_str())
}
}
impl From<Option<&str>> for ContentType {
fn from(content_type: Option<&str>) -> Self {
content_type
.map(ContentType::from)
.unwrap_or(ContentType::Unknown)
}
}
/// A file extracted from a multipart request.
#[derive(Debug, Clone, PartialEq)]
pub struct File {
pub filename: String,
pub bytes: Vec<u8>,
pub content_type: ContentType,
pub content_type: Mime,
}
impl File {
/// Creates a new file with the given filename, bytes and content type.
pub fn new(
filename: impl Into<String>,
bytes: impl Into<Vec<u8>>,
content_type: impl Into<ContentType>,
content_type: impl Into<Mime>,
) -> Self {
Self {
filename: filename.into(),
@ -65,12 +32,15 @@ impl File {
}
}
async fn from_field(field: Field<'_>) -> Result<Self, MultipartFileRejection> {
/// Creates a new file from a field in a multipart request.
pub async fn from_field(field: Field<'_>) -> Result<Self, MultipartFileRejection> {
let filename = field
.file_name()
.ok_or(MultipartFileRejection::MissingFilename)?
.to_string();
let content_type: ContentType = field.content_type().into();
let content_type = Mime::from_str(field.content_type().ok_or_else(|| {
MultipartFileRejection::FieldError("Missing or illegal content type".to_string())
})?)?;
let bytes = field.bytes().await?;
Ok(File::new(filename, bytes, content_type))
}
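As a small illustration of the move from the removed `ContentType` enum to `mime::Mime`, building a `File` by hand could look like this; the import path is assumed from the doc examples further down in this file:

```rust
use lib::axum::extractor::File; // assumed re-export path
use mime::TEXT_PLAIN;

fn text_file() -> File {
    // `content_type` now holds a parsed `Mime` instead of the old enum variant.
    File::new("notes.txt", b"hello world".to_vec(), TEXT_PLAIN)
}
```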
@ -87,12 +57,15 @@ pub struct MultipartFile(pub File);
#[derive(Debug, Clone, PartialEq)]
pub struct MultipartFiles(pub Vec<File>);
/// Rejection type for multipart file extractors.
#[derive(Debug, Error)]
pub enum MultipartFileRejection {
#[error(transparent)]
MultipartRejection(#[from] MultipartRejection),
#[error("Field error: {0}")]
FieldError(String),
#[error(transparent)]
FromStrError(#[from] mime::FromStrError),
#[error("No files found")]
NoFiles,
#[error("Expected one file, got several")]
@ -130,6 +103,9 @@ impl IntoResponse for MultipartFileRejection {
MultipartFileRejection::BodyError(error) => {
(axum::http::StatusCode::BAD_REQUEST, error).into_response()
}
MultipartFileRejection::FromStrError(error) => {
(axum::http::StatusCode::BAD_REQUEST, error.to_string()).into_response()
}
}
}
}
@ -141,6 +117,19 @@ where
{
type Rejection = MultipartFileRejection;
/// Extracts a single file from a multipart request.
    /// Expects exactly one file. A file must have a name, bytes and a content type.
    /// This extractor consumes the request and must be placed last in the handler.
/// # Example
/// ```
/// use std::str::from_utf8;
/// use axum::response::Html;
/// use lib::axum::extractor::MultipartFile;
///
/// async fn upload_file(MultipartFile(file): MultipartFile) -> Html<String> {
/// Html(String::from_utf8(file.bytes).unwrap())
/// }
/// ```
async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
let multipart = Multipart::from_request(req, state).await?;
let files = get_files(multipart).await?;
@ -160,6 +149,24 @@ where
{
type Rejection = MultipartFileRejection;
/// Extracts multiple files from a multipart request.
    /// Expects at least one file. A file must have a name, bytes and a content type.
    /// This extractor consumes the request and must be placed last in the handler.
/// # Example
/// ```
/// use axum::response::Html;
/// use lib::axum::extractor::MultipartFiles;
/// use std::str::from_utf8;
///
/// async fn upload_files(MultipartFiles(files): MultipartFiles) -> Html<String> {
/// let content = files
/// .iter()
/// .map(|file| String::from_utf8(file.bytes.clone()).unwrap())
/// .collect::<Vec<String>>()
/// .join("<br>");
/// Html(content)
/// }
/// ```
async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
let multipart = Multipart::from_request(req, state).await?;
let files = get_files(multipart).await?;

View File

@ -1,5 +1,9 @@
#[cfg(feature = "io")]
use {crate::io::file, axum::body::Body, axum::response::Html, std::io};
use {
crate::io::file,
axum::{body::Body, response::Html},
std::io,
};
/// Load an HTML file from the given file path, relative to the current directory.
/// # Arguments

View File

@ -4,3 +4,5 @@ pub mod load;
#[cfg(feature = "serde")]
pub mod response;
pub mod router;
#[cfg(feature = "serde")]
pub mod wrappers;

View File

@ -18,6 +18,7 @@ mod tests {
use axum::http::header::CONTENT_TYPE;
use axum::http::{HeaderValue, StatusCode};
use axum::response::IntoResponse;
use mime::APPLICATION_JSON;
use serde::Serialize;
use crate::serde::response::BaseResponse;
@ -39,7 +40,7 @@ mod tests {
assert_eq!(json_response.status(), StatusCode::OK);
assert_eq!(
json_response.headers().get(CONTENT_TYPE),
Some(&HeaderValue::from_static("application/json"))
Some(&HeaderValue::from_static(APPLICATION_JSON.as_ref()))
);
}

View File

@ -20,12 +20,22 @@
#[macro_export]
macro_rules! router {
($body:expr) => {
pub(crate) fn router() -> axum::Router {
pub fn router() -> axum::Router {
$body
}
};
($body:expr; $state:ty) => {
pub(crate) fn router() -> axum::Router<$state> {
pub fn router() -> axum::Router<$state> {
$body
}
};
($body:expr; $state:ident: $($bound:tt),*) => {
pub fn router<$state: $($bound+)* 'static>() -> axum::Router<$state> {
$body
}
};
($body:expr; $generic:ident: $($bound:tt),* -> $state:ty) => {
pub fn router<$generic: $($bound+)* 'static>() -> axum::Router<$state<$generic>> {
$body
}
};
@ -35,6 +45,12 @@ macro_rules! router {
($route:expr, $router:expr, $state:ty) => {
router!(axum::Router::new().nest($route, $router); $state);
};
($route:expr, $router:expr, $state:ident: $($bound:tt),*) => {
router!(axum::Router::new().nest($route, $router); $state: $($bound),*);
};
($route:expr, $router:expr, $generic:ident: $($bound:tt),* -> $state:ty) => {
router!(axum::Router::new().nest($route, $router); $generic: $($bound),* -> $state);
};
($($method:ident $route:expr => $func:expr),* $(,)?) => {
router!($crate::routes!($($method $route => $func),*));
};
@ -60,6 +76,11 @@ macro_rules! routes {
};
}
/// Merges the given routers into a single router.
/// # Examples
/// ```
/// let _: axum::Router<()> = lib::join_routes![axum::Router::new(), axum::Router::new()];
/// ```
#[macro_export]
macro_rules! join_routes {
($($route:expr),* $(,)?) => {
@ -112,6 +133,18 @@ mod tests {
);
}
#[test]
fn test_nested_router_with_generic_state() {
router!(
"/simplify",
routes!(
get "/:exp" => || async {},
get "/table/:exp" => |_state: State<T>| async {}
),
T: Clone, Send, Sync
);
}
#[test]
fn test_routes() {
let _router: Router = routes!(

src/axum/wrappers.rs (new file)

@ -0,0 +1,24 @@
use axum::response::{IntoResponse, Response};
use derive_more::{Constructor, From};
use into_response_derive::IntoResponse;
use serde::Serialize;
/// Wrapper for a vector of items.
#[derive(Debug, Clone, PartialEq, Default, Serialize, From, Constructor)]
pub struct Array<T: Serialize> {
pub data: Vec<T>,
}
/// Wrapper for a count.
#[derive(
Debug, Clone, Copy, PartialEq, Eq, Default, Serialize, IntoResponse, From, Constructor,
)]
pub struct Count {
pub count: usize,
}
impl<T: Serialize> IntoResponse for Array<T> {
fn into_response(self) -> Response {
crate::from!(self).into_response()
}
}
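A hypothetical pair of handlers showing how `Array` and `Count` can be returned straight from axum routes; the paths and handler names are illustrative only:

```rust
use axum::{routing::get, Router};
use lib::axum::wrappers::{Array, Count};

// Both wrappers serialize through BaseResponse via the `from!` macro.
async fn list_ids() -> Array<u32> {
    Array::new(vec![1, 2, 3])
}

async fn count_ids() -> Count {
    Count::new(3)
}

fn api_router() -> Router {
    Router::new()
        .route("/ids", get(list_ids))
        .route("/ids/count", get(count_ids))
}
```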

src/diesel/mod.rs (new file)

@ -0,0 +1,4 @@
pub mod pool;
/// Re-export diesel::result::Error as DieselError
pub type DieselError = diesel::result::Error;

src/diesel/pool.rs (new file)

@ -0,0 +1,24 @@
use deadpool_diesel::postgres::BuildError;
use diesel_async::pooled_connection::deadpool::Pool;
use diesel_async::pooled_connection::AsyncDieselConnectionManager;
use diesel_async::AsyncPgConnection;
/// A type alias for the asynchronous PostgreSQL connection pool.
pub type PgPool = Pool<AsyncPgConnection>;
/// Create a deadpool connection pool from the given URL.
/// Uses the default pool size and other default settings.
pub fn create_pool_from_url(url: impl Into<String>) -> Result<PgPool, BuildError> {
let config = AsyncDieselConnectionManager::<AsyncPgConnection>::new(url);
Pool::builder(config).build()
}
/// Create a deadpool connection pool from the given URL.
/// Using the given pool size and other default settings.
pub fn create_pool_from_url_with_size(
url: impl Into<String>,
size: usize,
) -> Result<PgPool, BuildError> {
let config = AsyncDieselConnectionManager::<AsyncPgConnection>::new(url);
Pool::builder(config).max_size(size).build()
}
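A minimal sketch of building a pool at startup using the helpers above; the connection URL and pool size are placeholders:

```rust
use lib::diesel::pool::{create_pool_from_url_with_size, PgPool};

fn init_pool() -> PgPool {
    // Placeholder URL and size; real values would come from configuration.
    create_pool_from_url_with_size("postgres://postgres:postgres@localhost:5432/app", 10)
        .expect("failed to build connection pool")
}
```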

View File

@ -1,5 +1,10 @@
use {std::io::Error, tokio::fs::File, tokio_util::io::ReaderStream};
/// Loads a file from the file system and returns a stream of bytes.
/// # Arguments
/// * `file_path` - The path to the file to load.
/// # Returns
/// A stream of bytes from the file if the file is found. Otherwise, an error is returned.
pub async fn load_file<Path>(file_path: Path) -> Result<ReaderStream<File>, Error>
where
Path: AsRef<std::path::Path>,

View File

@ -1,16 +1,26 @@
#![allow(dead_code)]
#[cfg(all(feature = "derive", feature = "diesel"))]
pub extern crate diesel_crud_derive;
#[cfg(feature = "diesel")]
pub extern crate diesel_crud_trait;
#[cfg(all(feature = "derive", feature = "axum", feature = "serde"))]
pub extern crate into_response_derive;
#[cfg(feature = "read-files")]
pub extern crate read_files;
extern crate self as lib;
#[cfg(feature = "axum")]
pub mod axum;
#[cfg(feature = "diesel")]
pub mod diesel;
#[cfg(feature = "io")]
pub mod io;
#[cfg(feature = "nom")]
pub mod nom;
#[cfg(feature = "serde")]
pub mod serde;
#[cfg(feature = "time")]
pub mod time;
pub mod traits;
#[cfg(feature = "iter")]
pub mod vector;
#[cfg(all(feature = "derive", feature = "serde"))]
pub extern crate derive;

View File

@ -31,7 +31,7 @@ pub fn parenthesized<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IRe
where
Parser: FnMut(&'a str) -> IResult<&'a str, R>,
{
delimited(char('('), trim(inner), char(')'))
delimited(char('('), inner, char(')'))
}
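For reference, a quick sketch of the behaviour after `trim` was removed from the inner parser; whitespace inside the parentheses is now the caller's responsibility:

```rust
use lib::nom::combinators::parenthesized;
use nom::bytes::complete::tag;

fn main() {
    // Matches the exact inner content.
    assert_eq!(parenthesized(tag("test"))("(test)"), Ok(("", "test")));
    // Inner whitespace is no longer stripped, so this now fails.
    assert!(parenthesized(tag("test"))("( test )").is_err());
}
```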
/// Take where the predicate is true and the length is exactly `n`
@ -47,6 +47,29 @@ where
take_while_m_n(n, n, predicate)
}
/// Parse the inner parser and then the end of the input.
/// Very useful for ensuring that the entire input is consumed.
/// # Arguments
/// * `inner` - The parser to run
/// # Returns
/// A parser that runs the inner parser and then the end of the input.
/// # Example
/// ```
/// use nom::bytes::complete::{tag};
/// use lib::nom::combinators::exhausted;
///
/// let input = "test";
/// let (remaining, result) = exhausted(tag("test"))(input).unwrap();
/// assert_eq!(remaining, "");
/// assert_eq!(result, "test");
/// ```
/// - Fails if the input is not exhausted
/// ```
/// use nom::bytes::complete::{tag};
/// use lib::nom::combinators::exhausted;
///
/// let input = "test";
/// assert!(exhausted(tag("tes"))(input).is_err());
/// ```
pub fn exhausted<'a, Parser, R>(inner: Parser) -> impl FnMut(&'a str) -> IResult<&'a str, R>
where
Parser: FnMut(&'a str) -> IResult<&'a str, R>,
@ -56,9 +79,10 @@ where
#[cfg(test)]
mod tests {
use super::*;
use nom::{bytes::complete::take_while, sequence::tuple};
use super::*;
#[test]
fn test_trim_both_sides() {
let input = " test ";

View File

@ -4,7 +4,7 @@ use serde::Serialize;
pub struct BaseResponse<T: Serialize> {
pub version: String,
#[serde(flatten)]
pub body: T, // T must be a struct (or enum?)
pub body: T, // T must be a struct (or enum?) TODO from! macro that validates T on compile time
}
impl<T: Serialize> BaseResponse<T> {
@ -16,6 +16,14 @@ impl<T: Serialize> BaseResponse<T> {
}
}
// TODO version should reference the version in caller's Cargo.toml
#[macro_export]
macro_rules! from {
($body:expr) => {
$crate::serde::response::BaseResponse::new(env!("CARGO_PKG_VERSION"), $body)
};
}
#[cfg(test)]
mod tests {
use super::*;
@ -35,4 +43,14 @@ mod tests {
);
assert_eq!(response.body.message, "Hi".to_string());
}
#[test]
fn test_from_macro() {
let response = from!(Response {
message: "Hi".to_string(),
});
from!(1); // Compiles for now; the TODO above would reject non-struct bodies at compile time
assert_eq!(response.version, env!("CARGO_PKG_VERSION"));
assert_eq!(response.body.message, "Hi".to_string());
}
}

src/time/common.rs (new file)

@ -0,0 +1,64 @@
use chrono::NaiveDateTime;
use derive_more::{Constructor, From};
/// Represents a date-time interval using naive date-time.
/// All date-times are expected to be in UTC.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Constructor, From)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
pub struct DateTimeInterval {
pub start: NaiveDateTime,
pub end: NaiveDateTime,
}
impl DateTimeInterval {
/// Creates a new `DateTimeInterval` from the given `start` and `end` `NaiveDateTime`s.
/// The `start` must be before the `end`.
/// If `start` is equal to or after `end`, this function will return `None`.
/// # Arguments
/// * `start` - The start of the interval.
/// * `end` - The end of the interval.
/// # Returns
/// A new `DateTimeInterval` if `start` is before `end`, otherwise `None`.
/// # Examples
/// ```
/// use chrono::{NaiveDateTime, Duration, Utc};
/// use lib::time::DateTimeInterval;
///
/// let start = Utc::now().naive_utc();
/// let end = start + Duration::days(1);
/// let interval = DateTimeInterval::new_safe(start, end);
/// assert_eq!(interval, Some((start, end).into()));
/// ```
/// Illegal interval:
/// ```
/// use chrono::{NaiveDateTime, Utc};
/// use lib::time::DateTimeInterval;
/// let start = Utc::now().naive_utc();
/// let end = start;
/// let interval = DateTimeInterval::new_safe(start, end);
/// assert_eq!(interval, None);
/// ```
pub fn new_safe(start: NaiveDateTime, end: NaiveDateTime) -> Option<Self> {
if start < end {
Some(Self::new(start, end))
} else {
None
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use chrono::DateTime;
#[test]
fn test_new_safe() {
let start = DateTime::from_timestamp(0, 0).unwrap().naive_utc();
let end = DateTime::from_timestamp(1, 0).unwrap().naive_utc();
assert_eq!(
DateTimeInterval::new_safe(start, end),
Some(DateTimeInterval::new(start, end))
);
assert_eq!(DateTimeInterval::new_safe(end, start), None);
}
}

src/time/mod.rs (new file)

@ -0,0 +1,3 @@
mod common;
pub use common::DateTimeInterval;

View File

@ -1,4 +1,6 @@
/// Modify self to contain only distinct elements.
pub trait Distinct {
/// Modify self to contain only distinct elements.
fn distinct(&mut self);
}
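As an aside, a self-contained sketch of the kind of implementation this trait invites; a local copy of the trait is declared so the snippet compiles on its own, and none of this is part of the diff:

```rust
use std::collections::HashSet;
use std::hash::Hash;

// Local stand-in for the `Distinct` trait above.
trait Distinct {
    fn distinct(&mut self);
}

// Keep the first occurrence of each element, preserving order.
impl<T: Eq + Hash + Clone> Distinct for Vec<T> {
    fn distinct(&mut self) {
        let mut seen = HashSet::new();
        self.retain(|item| seen.insert(item.clone()));
    }
}

fn main() {
    let mut values = vec![1, 2, 2, 3, 1];
    values.distinct();
    assert_eq!(values, vec![1, 2, 3]);
}
```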

View File

@ -1,6 +1,38 @@
/// Create a `HashMap` with the given key-value pairs.
/// There are three ways to use this macro:
/// 1. `map!()`: Create an empty `HashMap`.
/// 2. `map!(usize; 1, 2)`: Create a `HashMap` with the keys `1` and `2`, each mapped to the default value of `usize`.
/// 3. `map!("one" => 1, "two" => 2)`: Create a `HashMap` mapping `"one"` to `1` and `"two"` to `2`.
/// # Examples
/// ```
/// use std::collections::HashMap;
///
/// let empty_map: HashMap<usize, usize> = lib::map!();
/// assert_eq!(empty_map.len(), 0);
///
/// let map: HashMap<&str, usize> = lib::map!("one" => 1, "two" => 2);
/// assert_eq!(map.len(), 2);
/// assert_eq!(map.get("one"), Some(&1));
/// assert_eq!(map.get("two"), Some(&2));
///
/// let map: HashMap<usize, usize> = lib::map!(usize; 1, 2);
/// assert_eq!(map.len(), 2);
/// assert_eq!(map.get(&1), Some(&0));
/// assert_eq!(map.get(&2), Some(&0));
/// ```
#[macro_export]
macro_rules! map {
() => { std::collections::HashMap::new() };
($default:ty; $($key:expr),* $(,)?) => {
{
#[allow(unused_mut)]
let mut temp_map = std::collections::HashMap::new();
$(
temp_map.insert($key, <$default>::default());
)*
temp_map
}
};
($($k:expr => $v:expr),* $(,)?) => {
{
let mut temp_map = std::collections::HashMap::new();
@ -33,4 +65,19 @@ mod tests {
assert_eq!(map.get("two"), Some(&2));
assert_eq!(map.get("three"), Some(&3));
}
#[test]
fn test_map_only_keys() {
let map: HashMap<usize, usize> = map!(usize; 1, 2, 3);
assert_eq!(map.len(), 3);
assert_eq!(map.get(&1), Some(&0));
assert_eq!(map.get(&2), Some(&0));
assert_eq!(map.get(&3), Some(&0));
}
#[test]
fn test_map_only_keys_0_keys() {
let map: HashMap<usize, usize> = map!(usize;);
assert_eq!(map.len(), 0);
}
}