commit 0625b0fa4d (2024-10-21 16:39:31 +02:00)
74 changed files with 5003 additions and 0 deletions

.cargo/config.toml Normal file

@@ -0,0 +1,32 @@
# Cargo config file.
# See: https://doc.rust-lang.org/cargo/reference/config.html
# Environment variables set for all `cargo ...` commands.
[env]
# Scope down tracing to filter out tracing from external libs.
RUST_LOG="web_server=debug,lib_core=debug,lib_auth=debug,lib_utils=debug"
# -- Service Environment Variables
# IMPORTANT:
# These apply to cargo commands only.
# In a deployed environment, they should be managed by the container
# infrastructure (e.g., Kubernetes).
## -- Secrets
# Keys and passwords below are for localhost dev ONLY
# (e.g., simple "welcome"-style passwords),
# so no encryption is needed.
SERVICE_DB_URL="postgres://app_user:dev_only_pwd@localhost/app_db"
SERVICE_PWD_KEY="CKUGFOD9_2Qf6Pn3ZFRYgPYb8ht4vKqEG9PGMXTB7497bT0367DjoaD6ydFnEVaIRda0kKeBZVCT5Hb62m2sCA"
SERVICE_TOKEN_KEY="9FoHBmkyxbgu_xFoQK7e0jz3RMNVJWgfvbVn712FBNH9LLaAWS3CS6Zpcg6RveiObvCUb6a2z-uAiLjhLh2igw"
SERVICE_TOKEN_DURATION_SEC="1800" # 30 minutes
## -- ConfigMap
# This path is relative to Cargo.toml.
# In deployed images, an absolute path is preferable.
SERVICE_WEB_FOLDER="web-folder/"
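Note: the `RUST_LOG` value above is a `tracing` env-filter string. The `tracing-subscriber` crate (pulled in later in this commit with the `env-filter` feature) is the usual consumer; a minimal initialization sketch, assuming it happens in the service's `main` (the actual wiring is not part of this excerpt):

```rust
use tracing_subscriber::EnvFilter;

fn init_tracing() {
    tracing_subscriber::fmt()
        // Honors RUST_LOG, e.g. "web_server=debug,lib_core=debug,...".
        .with_env_filter(EnvFilter::from_default_env())
        .init();
}
```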

.gitignore vendored Normal file

@@ -0,0 +1,39 @@
# By default, ignore any .* files, except .gitignore
.*
!.gitignore
# -- Rust
# For .cargo/config.toml
!.cargo/
target/
# For this blueprint, ignore Cargo.lock.
# (Commit Cargo.lock for a real project.)
Cargo.lock
# -- Safety net
dist/
__pycache__/
node_modules/
npm-debug.log
report.*.json
*.parquet
*.map
*.zip
*.gz
*.tar
*.tgz
# videos
*.mov
*.mp4
# images
*.icns
*.ico
*.jpeg
*.jpg
*.gif
*.png

Cargo.toml Normal file

@@ -0,0 +1,19 @@
[workspace.lints.rust]
unsafe_code = "forbid"
# unused = "allow" # For experimental dev.
[workspace]
resolver = "2"
members = [
# -- Application Libraries
"crates/libs/lib-utils", # e.g., base64, time.
"crates/libs/lib-rpc", # e.g., rpc routing.
"crates/libs/lib-auth", # e.g., for pwd, token.
"crates/libs/lib-core", # e.g., model, ctx, config.
# -- Application Services
"crates/services/web-server",
# -- Tools
"crates/tools/gen-key",
]

LICENSE-APACHE Normal file

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2023 Jeremy Chone
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

LICENSE-MIT Normal file

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2023 Jeremy Chone
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

README.md Normal file

@@ -0,0 +1,92 @@
# Rust10x Web App Blueprint for Production Coding
More info at: https://rust10x.com/web-app
## Rust10x Web App YouTube Videos:
- [Episode 01 - Rust Web App - Base Production Code](https://youtube.com/watch?v=3cA_mk4vdWY&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Topic video - Code clean - `#[cfg_attr(...)]` for unit test](https://www.youtube.com/watch?v=DCPs5VRTK-U&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Topic video - The Reasoning Behind Differentiating ModelControllers and ModelManager](https://www.youtube.com/watch?v=JdLi69mWIIE&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Topic video - Base64Url - Understanding the Usage and Significance of Base64URL](https://www.youtube.com/watch?v=-9K7zNgsbP0&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Episode 02 - Sea-Query (sql builder) & modql (mongodb like filter)](https://www.youtube.com/watch?v=-dMH9UiwKqg&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Episode 03 - Cargo Workspace (multi-crates)](https://www.youtube.com/watch?v=zUxF0kvydJs&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [AI-Voice-Remastered](https://www.youtube.com/watch?v=iCGIqEWWTcA&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- [Episode 04 - Multi-Scheme Password Hashing](https://www.youtube.com/watch?v=3E0zK5h9zEs&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
- Other Related videos:
- [Rust Axum Full Course](https://youtube.com/watch?v=XZtlD_m59sM&list=PL7r-PXl6ZPcCIOFaL7nVHXZvBmHNhrh_Q)
## Starting the DB
```sh
# Start postgresql server docker image:
docker run --rm --name pg -p 5432:5432 \
-e POSTGRES_PASSWORD=welcome \
postgres:15
# (optional) To have a psql terminal on pg.
# In another terminal (tab) run psql:
docker exec -it -u postgres pg psql
# (optional) For pg to print all sql statements.
# In psql command line started above.
ALTER DATABASE postgres SET log_statement = 'all';
```
## Dev (watch)
> NOTE: Install cargo-watch with `cargo install cargo-watch`.
```sh
# Terminal 1 - To run the server.
cargo watch -q -c -w crates/services/web-server/src/ -w crates/libs/ -w .cargo/ -x "run -p web-server"
# Terminal 2 - To run the quick_dev.
cargo watch -q -c -w crates/services/web-server/examples/ -x "run -p web-server --example quick_dev"
```
## Dev
```sh
# Terminal 1 - To run the server.
cargo run -p web-server
# Terminal 2 - To run the tests.
cargo run -p web-server --example quick_dev
```
## Unit Test (watch)
```sh
cargo watch -q -c -x "test -- --nocapture"
# Specific test with filter.
cargo watch -q -c -x "test -p lib-core test_create -- --nocapture"
```
## Unit Test
```sh
cargo test -- --nocapture
cargo watch -q -c -x "test -p lib-core model::task::tests::test_create -- --nocapture"
```
## Tools
```sh
cargo run -p gen-key
```
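The `gen-key` tool's source is not part of this excerpt. A plausible sketch of such a generator, assuming the `rand` and `base64` crates: it emits a 512-bit random key, base64url-encoded, which matches the shape of `SERVICE_PWD_KEY`/`SERVICE_TOKEN_KEY` in `.cargo/config.toml`:

```rust
use base64::engine::general_purpose::URL_SAFE_NO_PAD;
use base64::Engine as _;
use rand::RngCore;

fn main() {
    // 64 random bytes = 512 bits, a full-size key for HMAC-SHA-512.
    let mut key = [0u8; 64];
    rand::thread_rng().fill_bytes(&mut key);
    println!("Generated key (b64u): {}", URL_SAFE_NO_PAD.encode(key));
}
```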
<br />
---
More resources for [Rust for Production Coding](https://rust10x.com)
[This repo on GitHub](https://github.com/rust10x/rust-web-app)


@@ -0,0 +1,29 @@
[package]
name = "lib-auth"
version = "0.1.0"
edition = "2021"
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
# -- App Libs
lib-utils = { path = "../../libs/lib-utils"}
# -- Json
serde = { version = "1", features = ["derive"] }
# -- Hashing (pwd-scheme01 & Token)
hmac = "0.12"
sha2 = "0.10"
# -- Hashing (pwd-scheme02)
argon2 = {version="0.5", features=["std"]}
# -- Others
uuid = {version = "1", features = ["v4","fast-rng",]}
lazy-regex = "3"
derive_more = {version = "1.0.0-beta", features = ["from"] }
enum_dispatch = "0.3"
[dev-dependencies]
anyhow = "1"


@@ -0,0 +1,33 @@
use lib_utils::envs::{get_env_b64u_as_u8s, get_env_parse};
use std::sync::OnceLock;
pub fn auth_config() -> &'static AuthConfig {
static INSTANCE: OnceLock<AuthConfig> = OnceLock::new();
INSTANCE.get_or_init(|| {
AuthConfig::load_from_env().unwrap_or_else(|ex| {
panic!("FATAL - WHILE LOADING CONF - Cause: {ex:?}")
})
})
}
#[allow(non_snake_case)]
pub struct AuthConfig {
// -- Crypt
pub PWD_KEY: Vec<u8>,
pub TOKEN_KEY: Vec<u8>,
pub TOKEN_DURATION_SEC: f64,
}
impl AuthConfig {
fn load_from_env() -> lib_utils::envs::Result<AuthConfig> {
Ok(AuthConfig {
// -- Crypt
PWD_KEY: get_env_b64u_as_u8s("SERVICE_PWD_KEY")?,
TOKEN_KEY: get_env_b64u_as_u8s("SERVICE_TOKEN_KEY")?,
TOKEN_DURATION_SEC: get_env_parse("SERVICE_TOKEN_DURATION_SEC")?,
})
}
}
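Usage note: thanks to the `OnceLock`, callers simply invoke `auth_config()`; the first call loads and validates the env (panicking early on misconfiguration), and later calls return the cached `&'static AuthConfig`. A tiny illustrative helper (hypothetical, not part of the crate):

```rust
// Hypothetical helper, for illustration only.
fn token_duration_ms() -> f64 {
    crate::auth_config().TOKEN_DURATION_SEC * 1000.0
}
```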


@@ -0,0 +1,5 @@
mod config;
pub mod pwd;
pub mod token;
use config::auth_config;


@@ -0,0 +1,27 @@
use crate::pwd::scheme;
use derive_more::From;
use serde::Serialize;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, Serialize, From)]
pub enum Error {
PwdWithSchemeFailedParse,
// -- Modules
#[from]
Scheme(scheme::Error),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,119 @@
// region: --- Modules
mod error;
mod scheme;
pub use self::error::{Error, Result};
pub use scheme::SchemeStatus;
use crate::pwd::scheme::{get_scheme, Scheme, DEFAULT_SCHEME};
use lazy_regex::regex_captures;
use std::str::FromStr;
use uuid::Uuid;
// endregion: --- Modules
// region: --- Types
pub struct ContentToHash {
pub content: String, // Clear content.
pub salt: Uuid,
}
// endregion: --- Types
// region: --- Public Functions
/// Hash the password with the default scheme.
pub fn hash_pwd(to_hash: &ContentToHash) -> Result<String> {
hash_for_scheme(DEFAULT_SCHEME, to_hash)
}
/// Validate whether a `ContentToHash` matches the given hashed password reference.
pub fn validate_pwd(to_hash: &ContentToHash, pwd_ref: &str) -> Result<SchemeStatus> {
let PwdParts {
scheme_name,
hashed,
} = pwd_ref.parse()?;
validate_for_scheme(&scheme_name, to_hash, &hashed)?;
if scheme_name == DEFAULT_SCHEME {
Ok(SchemeStatus::Ok)
} else {
Ok(SchemeStatus::Outdated)
}
}
// endregion: --- Public Functions
// region: --- Privates
fn hash_for_scheme(scheme_name: &str, to_hash: &ContentToHash) -> Result<String> {
let pwd_hashed = get_scheme(scheme_name)?.hash(to_hash)?;
Ok(format!("#{scheme_name}#{pwd_hashed}"))
}
fn validate_for_scheme(
scheme_name: &str,
to_hash: &ContentToHash,
pwd_ref: &str,
) -> Result<()> {
get_scheme(scheme_name)?.validate(to_hash, pwd_ref)?;
Ok(())
}
struct PwdParts {
/// The scheme only (e.g., "01")
scheme_name: String,
/// The hashed password.
hashed: String,
}
impl FromStr for PwdParts {
type Err = Error;
fn from_str(pwd_with_scheme: &str) -> Result<Self> {
regex_captures!(
r#"^#(\w+)#(.*)"#, // a literal regex
pwd_with_scheme
)
.map(|(_, scheme, hashed)| Self {
scheme_name: scheme.to_string(),
hashed: hashed.to_string(),
})
.ok_or(Error::PwdWithSchemeFailedParse)
}
}
// endregion: --- Privates
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
#[test]
fn test_multi_scheme_ok() -> Result<()> {
// -- Setup & Fixtures
let fx_salt = Uuid::parse_str("f05e8961-d6ad-4086-9e78-a6de065e5453")?;
let fx_to_hash = ContentToHash {
content: "hello world".to_string(),
salt: fx_salt,
};
// -- Exec
let pwd_hashed = hash_for_scheme("01", &fx_to_hash)?;
let pwd_validate = validate_pwd(&fx_to_hash, &pwd_hashed)?;
// -- Check
assert!(
matches!(pwd_validate, SchemeStatus::Outdated),
"status should be SchemeStatus::Outdated"
);
Ok(())
}
}
// endregion: --- Tests
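The multi-scheme design above enables transparent password migration. A sketch of how calling code might use it (the persistence step is omitted; in this blueprint it would go through something like `UserBmc::update_pwd` in `lib-core`):

```rust
use lib_auth::pwd::{self, ContentToHash, SchemeStatus};
use uuid::Uuid;

fn login_check(clear_pwd: &str, salt: Uuid, pwd_ref: &str) -> pwd::Result<()> {
    let to_hash = ContentToHash {
        content: clear_pwd.to_string(),
        salt,
    };
    // Errors (wrong password, unknown scheme, ...) bubble up here.
    let status = pwd::validate_pwd(&to_hash, pwd_ref)?;
    // If the stored hash uses an outdated scheme, rehash with the
    // default scheme and persist the new pwd_ref (persistence omitted).
    if let SchemeStatus::Outdated = status {
        let _new_pwd_ref = pwd::hash_pwd(&to_hash)?;
    }
    Ok(())
}
```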


@@ -0,0 +1,25 @@
use serde::Serialize;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, Serialize)]
pub enum Error {
Key,
Salt,
Hash,
PwdValidate,
SchemeNotFound(String),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,41 @@
// region: --- Modules
mod error;
mod scheme_01;
mod scheme_02;
pub use self::error::{Error, Result};
use crate::pwd::ContentToHash;
use enum_dispatch::enum_dispatch;
// endregion: --- Modules
pub const DEFAULT_SCHEME: &str = "02";
#[derive(Debug)]
pub enum SchemeStatus {
Ok, // The pwd uses the latest scheme. All good.
Outdated, // The pwd uses an old scheme.
}
#[enum_dispatch]
pub trait Scheme {
fn hash(&self, to_hash: &ContentToHash) -> Result<String>;
fn validate(&self, to_hash: &ContentToHash, pwd_ref: &str) -> Result<()>;
}
#[enum_dispatch(Scheme)]
pub enum SchemeDispatcher {
Scheme01(scheme_01::Scheme01),
Scheme02(scheme_02::Scheme02),
}
pub fn get_scheme(scheme_name: &str) -> Result<impl Scheme> {
match scheme_name {
"01" => Ok(SchemeDispatcher::Scheme01(scheme_01::Scheme01)),
"02" => Ok(SchemeDispatcher::Scheme02(scheme_02::Scheme02)),
_ => Err(Error::SchemeNotFound(scheme_name.to_string())),
}
}
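`#[enum_dispatch]` gives static dispatch over the schemes without a `Box<dyn Scheme>` allocation: it generates a `Scheme` impl for `SchemeDispatcher` that matches on the variant and forwards the call. Conceptually (a sketch, not the actual generated code):

```rust
// What #[enum_dispatch(Scheme)] conceptually expands to:
impl Scheme for SchemeDispatcher {
    fn hash(&self, to_hash: &ContentToHash) -> Result<String> {
        match self {
            SchemeDispatcher::Scheme01(s) => s.hash(to_hash),
            SchemeDispatcher::Scheme02(s) => s.hash(to_hash),
        }
    }
    fn validate(&self, to_hash: &ContentToHash, pwd_ref: &str) -> Result<()> {
        match self {
            SchemeDispatcher::Scheme01(s) => s.validate(to_hash, pwd_ref),
            SchemeDispatcher::Scheme02(s) => s.validate(to_hash, pwd_ref),
        }
    }
}
```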


@@ -0,0 +1,76 @@
use super::{Error, Result};
use crate::auth_config;
use crate::pwd::scheme::Scheme;
use crate::pwd::ContentToHash;
use hmac::{Hmac, Mac};
use lib_utils::b64::b64u_encode;
use sha2::Sha512;
pub struct Scheme01;
impl Scheme for Scheme01 {
fn hash(&self, to_hash: &ContentToHash) -> Result<String> {
let key = &auth_config().PWD_KEY;
hash(key, to_hash)
}
fn validate(&self, to_hash: &ContentToHash, raw_pwd_ref: &str) -> Result<()> {
let raw_pwd_new = self.hash(to_hash)?;
if raw_pwd_new == raw_pwd_ref {
Ok(())
} else {
Err(Error::PwdValidate)
}
}
}
fn hash(key: &[u8], to_hash: &ContentToHash) -> Result<String> {
let ContentToHash { content, salt } = to_hash;
// -- Create a HMAC-SHA-512 from key.
let mut hmac_sha512 =
Hmac::<Sha512>::new_from_slice(key).map_err(|_| Error::Key)?;
// -- Add content.
hmac_sha512.update(content.as_bytes());
hmac_sha512.update(salt.as_bytes());
// -- Finalize and b64u encode.
let hmac_result = hmac_sha512.finalize();
let result_bytes = hmac_result.into_bytes();
let result = b64u_encode(result_bytes);
Ok(result)
}
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use crate::auth_config;
use anyhow::Result;
use uuid::Uuid;
#[test]
fn test_scheme_01_hash_into_b64u_ok() -> Result<()> {
// -- Setup & Fixtures
let fx_salt = Uuid::parse_str("f05e8961-d6ad-4086-9e78-a6de065e5453")?;
let fx_key = &auth_config().PWD_KEY; // 512 bits = 64 bytes
let fx_to_hash = ContentToHash {
content: "hello world".to_string(),
salt: fx_salt,
};
// TODO: Need to fix fx_key, and precompute fx_res.
let fx_res = "qO9A90161DoewhNXFwVcnAaljRIVnajvd5zsVDrySCwxpoLwVCACzaz-8Ev2ZpI8RackUTLBVqFI6H5oMe-OIg";
// -- Exec
let res = hash(fx_key, &fx_to_hash)?;
// -- Check
assert_eq!(res, fx_res);
Ok(())
}
}
// endregion: --- Tests


@@ -0,0 +1,85 @@
use super::{Error, Result};
use crate::config::auth_config;
use crate::pwd::scheme::Scheme;
use argon2::password_hash::SaltString;
use argon2::{
Algorithm, Argon2, Params, PasswordHash, PasswordHasher as _,
PasswordVerifier as _, Version,
};
use std::sync::OnceLock;
pub struct Scheme02;
impl Scheme for Scheme02 {
fn hash(&self, to_hash: &crate::pwd::ContentToHash) -> Result<String> {
let argon2 = get_argon2();
let salt_b64 = SaltString::encode_b64(to_hash.salt.as_bytes())
.map_err(|_| Error::Salt)?;
let pwd = argon2
.hash_password(to_hash.content.as_bytes(), &salt_b64)
.map_err(|_| Error::Hash)?
.to_string();
Ok(pwd)
}
fn validate(
&self,
to_hash: &crate::pwd::ContentToHash,
pwd_ref: &str,
) -> Result<()> {
let argon2 = get_argon2();
let parsed_hash_ref = PasswordHash::new(pwd_ref).map_err(|_| Error::Hash)?;
argon2
.verify_password(to_hash.content.as_bytes(), &parsed_hash_ref)
.map_err(|_| Error::PwdValidate)
}
}
fn get_argon2() -> &'static Argon2<'static> {
static INSTANCE: OnceLock<Argon2<'static>> = OnceLock::new();
INSTANCE.get_or_init(|| {
let key = &auth_config().PWD_KEY;
Argon2::new_with_secret(
key,
Algorithm::Argon2id, // Same as Argon2::default()
Version::V0x13, // Same as Argon2::default()
Params::default(),
)
.unwrap() // TODO - needs to fail early
})
}
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use crate::pwd::ContentToHash;
use anyhow::Result;
use uuid::Uuid;
#[test]
fn test_scheme_02_hash_into_b64u_ok() -> Result<()> {
// -- Setup & Fixtures
let fx_to_hash = ContentToHash {
content: "hello world".to_string(),
salt: Uuid::parse_str("f05e8961-d6ad-4086-9e78-a6de065e5453")?,
};
let fx_res = "$argon2id$v=19$m=19456,t=2,p=1$8F6JYdatQIaeeKbeBl5UUw$TaRnmmbDdQ1aTzk2qQ2yQzPQoZfnKqhrfuTH/TRP5V4";
// -- Exec
let scheme = Scheme02;
let res = scheme.hash(&fx_to_hash)?;
// -- Check
assert_eq!(res, fx_res);
Ok(())
}
}
// endregion: --- Tests
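The `fx_res` fixture above is a PHC-format string (`$argon2id$v=19$m=19456,t=2,p=1$<salt_b64>$<hash_b64>`): algorithm, version, params, salt, and hash are all self-described, which is why `validate` only needs `pwd_ref` plus the clear content. A small illustration using the `PasswordHash` parser already imported above:

```rust
use argon2::PasswordHash;

// Illustration only: peek inside a PHC string.
fn inspect(pwd_ref: &str) {
    let parsed = PasswordHash::new(pwd_ref).expect("valid PHC string");
    println!("algorithm: {}", parsed.algorithm); // e.g., "argon2id"
    println!("has salt:  {}", parsed.salt.is_some());
    println!("has hash:  {}", parsed.hash.is_some());
}
```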


@@ -0,0 +1,28 @@
use serde::Serialize;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, Serialize)]
pub enum Error {
HmacFailNewFromSlice,
InvalidFormat,
CannotDecodeIdent,
CannotDecodeExp,
SignatureNotMatching,
ExpNotIso,
Expired,
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,245 @@
// region: --- Modules
mod error;
pub use self::error::{Error, Result};
use crate::config::auth_config;
use hmac::{Hmac, Mac};
use lib_utils::b64::{b64u_decode_to_string, b64u_encode};
use lib_utils::time::{now_utc, now_utc_plus_sec_str, parse_utc};
use sha2::Sha512;
use std::fmt::Display;
use std::str::FromStr;
use uuid::Uuid;
// endregion: --- Modules
// region: --- Token Type
/// String format: `ident_b64u.exp_b64u.sign_b64u`
#[derive(Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct Token {
pub ident: String, // Identifier (username for example).
pub exp: String, // Expiration date in Rfc3339.
pub sign_b64u: String, // Signature, base64url encoded.
}
impl FromStr for Token {
type Err = Error;
fn from_str(token_str: &str) -> std::result::Result<Self, Self::Err> {
let splits: Vec<&str> = token_str.split('.').collect();
if splits.len() != 3 {
return Err(Error::InvalidFormat);
}
let (ident_b64u, exp_b64u, sign_b64u) = (splits[0], splits[1], splits[2]);
Ok(Self {
ident: b64u_decode_to_string(ident_b64u)
.map_err(|_| Error::CannotDecodeIdent)?,
exp: b64u_decode_to_string(exp_b64u)
.map_err(|_| Error::CannotDecodeExp)?,
sign_b64u: sign_b64u.to_string(),
})
}
}
impl Display for Token {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
"{}.{}.{}",
b64u_encode(&self.ident),
b64u_encode(&self.exp),
self.sign_b64u
)
}
}
// endregion: --- Token Type
// region: --- Web Token Gen and Validation
pub fn generate_web_token(user: &str, salt: Uuid) -> Result<Token> {
let config = &auth_config();
_generate_token(user, config.TOKEN_DURATION_SEC, salt, &config.TOKEN_KEY)
}
pub fn validate_web_token(origin_token: &Token, salt: Uuid) -> Result<()> {
let config = &auth_config();
_validate_token_sign_and_exp(origin_token, salt, &config.TOKEN_KEY)?;
Ok(())
}
// endregion: --- Web Token Gen and Validation
// region: --- (private) Token Gen and Validation
fn _generate_token(
ident: &str,
duration_sec: f64,
salt: Uuid,
key: &[u8],
) -> Result<Token> {
// -- Compute the first two components.
let ident = ident.to_string();
let exp = now_utc_plus_sec_str(duration_sec);
// -- Sign the first two components.
let sign_b64u = _token_sign_into_b64u(&ident, &exp, salt, key)?;
Ok(Token {
ident,
exp,
sign_b64u,
})
}
fn _validate_token_sign_and_exp(
origin_token: &Token,
salt: Uuid,
key: &[u8],
) -> Result<()> {
// -- Validate signature.
let new_sign_b64u =
_token_sign_into_b64u(&origin_token.ident, &origin_token.exp, salt, key)?;
if new_sign_b64u != origin_token.sign_b64u {
return Err(Error::SignatureNotMatching);
}
// -- Validate expiration.
let origin_exp = parse_utc(&origin_token.exp).map_err(|_| Error::ExpNotIso)?;
let now = now_utc();
if origin_exp < now {
return Err(Error::Expired);
}
Ok(())
}
/// Create token signature from token parts
/// and salt.
fn _token_sign_into_b64u(
ident: &str,
exp: &str,
salt: Uuid,
key: &[u8],
) -> Result<String> {
let content = format!("{}.{}", b64u_encode(ident), b64u_encode(exp));
// -- Create a HMAC-SHA-512 from key.
let mut hmac_sha512 = Hmac::<Sha512>::new_from_slice(key)
.map_err(|_| Error::HmacFailNewFromSlice)?;
// -- Add content.
hmac_sha512.update(content.as_bytes());
hmac_sha512.update(salt.as_bytes());
// -- Finalize and b64u encode.
let hmac_result = hmac_sha512.finalize();
let result_bytes = hmac_result.into_bytes();
let result = b64u_encode(result_bytes);
Ok(result)
}
// endregion: --- (private) Token Gen and Validation
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use anyhow::Result;
use std::thread;
use std::time::Duration;
#[test]
fn test_token_display_ok() -> Result<()> {
// -- Fixtures
let fx_token_str =
"ZngtaWRlbnQtMDE.MjAyMy0wNS0xN1QxNTozMDowMFo.some-sign-b64u-encoded";
let fx_token = Token {
ident: "fx-ident-01".to_string(),
exp: "2023-05-17T15:30:00Z".to_string(),
sign_b64u: "some-sign-b64u-encoded".to_string(),
};
// -- Exec & Check
assert_eq!(fx_token.to_string(), fx_token_str);
Ok(())
}
#[test]
fn test_token_from_str_ok() -> Result<()> {
// -- Fixtures
let fx_token_str =
"ZngtaWRlbnQtMDE.MjAyMy0wNS0xN1QxNTozMDowMFo.some-sign-b64u-encoded";
let fx_token = Token {
ident: "fx-ident-01".to_string(),
exp: "2023-05-17T15:30:00Z".to_string(),
sign_b64u: "some-sign-b64u-encoded".to_string(),
};
// -- Exec
let token: Token = fx_token_str.parse()?;
// -- Check
assert_eq!(token, fx_token);
Ok(())
}
#[test]
fn test_validate_web_token_ok() -> Result<()> {
// -- Setup & Fixtures
let fx_user = "user_one";
let fx_salt =
Uuid::parse_str("f05e8961-d6ad-4086-9e78-a6de065e5453").unwrap();
let fx_duration_sec = 0.02; // 20ms
let token_key = &auth_config().TOKEN_KEY;
let fx_token =
_generate_token(fx_user, fx_duration_sec, fx_salt, token_key)?;
// -- Exec
thread::sleep(Duration::from_millis(10));
let res = validate_web_token(&fx_token, fx_salt);
// -- Check
res?;
Ok(())
}
#[test]
fn test_validate_web_token_err_expired() -> Result<()> {
// -- Setup & Fixtures
let fx_user = "user_one";
let fx_salt =
Uuid::parse_str("f05e8961-d6ad-4086-9e78-a6de065e5453").unwrap();
let fx_duration_sec = 0.01; // 10ms
let token_key = &auth_config().TOKEN_KEY;
let fx_token =
_generate_token(fx_user, fx_duration_sec, fx_salt, token_key)?;
// -- Exec
thread::sleep(Duration::from_millis(20));
let res = validate_web_token(&fx_token, fx_salt);
// -- Check
assert!(
matches!(res, Err(Error::Expired)),
"Should have matched `Err(Error::Expired)` but was `{res:?}`"
);
Ok(())
}
}
// endregion: --- Tests
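A usage sketch of the web-token API above, assuming the caller has the per-user `token_salt` at hand (the `"demo1"` ident is illustrative):

```rust
use lib_auth::token::{generate_web_token, validate_web_token, Token};
use uuid::Uuid;

fn token_roundtrip(token_salt: Uuid) -> lib_auth::token::Result<()> {
    // Generate: ident + expiration, signed with the service TOKEN_KEY.
    let token = generate_web_token("demo1", token_salt)?;
    // Display produces the `ident_b64u.exp_b64u.sign_b64u` string
    // (e.g., a cookie value).
    let token_str = token.to_string();
    // Later: parse back (FromStr) and check signature + expiration.
    let parsed: Token = token_str.parse()?;
    validate_web_token(&parsed, token_salt)
}
```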


@@ -0,0 +1,43 @@
[package]
name = "lib-core"
version = "0.1.0"
edition = "2021"
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
# -- App Libs
lib-utils = { path = "../../libs/lib-utils"}
lib-auth = { path = "../../libs/lib-auth"}
# -- Async
tokio = { version = "1", features = ["full"] }
async-trait = "0.1"
# -- Json
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = {version = "3", features = ["time_0_3"]}
# -- Data
sqlx = { version = "0.7", features = [ "runtime-tokio", "postgres", "uuid" ] }
sea-query = "0.30"
sea-query-binder = { version = "0.5", features = ["sqlx-postgres", "with-uuid", "with-time" ] }
modql = {version = "0.3.4", features = ["with-sea-query"]}
# -- Tracing
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# -- Others
uuid = {version = "1", features = ["v4","fast-rng",]}
time = {version = "0.3", features = ["formatting", "parsing", "serde"]}
strum_macros = "0.25"
enum_dispatch = "0.3"
derive_more = {version = "1.0.0-beta", features = ["from"] }
[dev-dependencies]
anyhow = "1"
httpc-test = "0.1.1"
serial_test = "2"


@@ -0,0 +1,101 @@
use crate::ctx::Ctx;
use crate::model::user::{User, UserBmc};
use crate::model::ModelManager;
use sqlx::postgres::PgPoolOptions;
use sqlx::{Pool, Postgres};
use std::fs;
use std::path::{Path, PathBuf};
use std::time::Duration;
use tracing::info;
type Db = Pool<Postgres>;
// NOTE: Hardcoded to prevent accidental db updates on a deployed system.
const PG_DEV_POSTGRES_URL: &str = "postgres://postgres:welcome@localhost/postgres";
const PG_DEV_APP_URL: &str = "postgres://app_user:dev_only_pwd@localhost/app_db";
// sql files
const SQL_RECREATE_DB_FILE_NAME: &str = "00-recreate-db.sql";
const SQL_DIR: &str = "sql/dev_initial";
const DEMO_PWD: &str = "welcome";
pub async fn init_dev_db() -> Result<(), Box<dyn std::error::Error>> {
info!("{:<12} - init_dev_db()", "FOR-DEV-ONLY");
// -- Get the sql_dir
// Note: This is needed because cargo test and cargo run do not give the same
//       current_dir, given the workspace layout.
let current_dir = std::env::current_dir().unwrap();
let v: Vec<_> = current_dir.components().collect();
let path_comp = v.get(v.len().wrapping_sub(3));
let base_dir = if Some(true) == path_comp.map(|c| c.as_os_str() == "crates") {
v[..v.len() - 3].iter().collect::<PathBuf>()
} else {
current_dir.clone()
};
let sql_dir = base_dir.join(SQL_DIR);
// -- Create the app_db/app_user with the postgres user.
{
let sql_recreate_db_file = sql_dir.join(SQL_RECREATE_DB_FILE_NAME);
let root_db = new_db_pool(PG_DEV_POSTGRES_URL).await?;
pexec(&root_db, &sql_recreate_db_file).await?;
}
// -- Get sql files.
let mut paths: Vec<PathBuf> = fs::read_dir(sql_dir)?
.filter_map(|entry| entry.ok().map(|e| e.path()))
.collect();
paths.sort();
// -- SQL Execute each file.
let app_db = new_db_pool(PG_DEV_APP_URL).await?;
for path in paths {
let path_str = path.to_string_lossy();
if path_str.ends_with(".sql")
&& !path_str.ends_with(SQL_RECREATE_DB_FILE_NAME)
{
pexec(&app_db, &path).await?;
}
}
// -- Init model layer.
let mm = ModelManager::new().await?;
let ctx = Ctx::root_ctx();
// -- Set demo1 pwd
let demo1_user: User = UserBmc::first_by_username(&ctx, &mm, "demo1")
.await?
.unwrap();
UserBmc::update_pwd(&ctx, &mm, demo1_user.id, DEMO_PWD).await?;
info!("{:<12} - init_dev_db - set demo1 pwd", "FOR-DEV-ONLY");
Ok(())
}
async fn pexec(db: &Db, file: &Path) -> Result<(), sqlx::Error> {
info!("{:<12} - pexec: {file:?}", "FOR-DEV-ONLY");
// -- Read the file.
let content = fs::read_to_string(file)?;
// FIXME: Make the split more sql proof.
let sqls: Vec<&str> = content.split(';').collect();
for sql in sqls {
sqlx::query(sql).execute(db).await?;
}
Ok(())
}
async fn new_db_pool(db_con_url: &str) -> Result<Db, sqlx::Error> {
PgPoolOptions::new()
.max_connections(1)
.acquire_timeout(Duration::from_millis(500))
.connect(db_con_url)
.await
}


@@ -0,0 +1,80 @@
// region: --- Modules
mod dev_db;
use crate::ctx::Ctx;
use crate::model::project::{ProjectBmc, ProjectForCreate};
use crate::model::task::{Task, TaskBmc, TaskForCreate};
use crate::model::{self, ModelManager};
use tokio::sync::OnceCell;
use tracing::info;
// endregion: --- Modules
/// Initialize environment for local development.
/// (for early development, will be called from main()).
pub async fn init_dev() {
static INIT: OnceCell<()> = OnceCell::const_new();
INIT.get_or_init(|| async {
info!("{:<12} - init_dev_all()", "FOR-DEV-ONLY");
dev_db::init_dev_db().await.unwrap();
})
.await;
}
/// Initialize test environment.
pub async fn init_test() -> ModelManager {
static INIT: OnceCell<ModelManager> = OnceCell::const_new();
let mm = INIT
.get_or_init(|| async {
init_dev().await;
ModelManager::new().await.unwrap()
})
.await;
mm.clone()
}
pub async fn seed_project(
ctx: &Ctx,
mm: &ModelManager,
name: &str,
) -> model::Result<i64> {
ProjectBmc::create(
ctx,
mm,
ProjectForCreate {
name: name.to_string(),
},
)
.await
}
pub async fn seed_tasks(
ctx: &Ctx,
mm: &ModelManager,
project_id: i64,
titles: &[&str],
) -> model::Result<Vec<Task>> {
let mut tasks = Vec::new();
for title in titles {
let id = TaskBmc::create(
ctx,
mm,
TaskForCreate {
project_id,
title: title.to_string(),
},
)
.await?;
let task = TaskBmc::get(ctx, mm, id).await?;
tasks.push(task);
}
Ok(tasks)
}
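A sketch of how the web-server's `main()` can wire this up during early development (the actual `main.rs` is not part of this excerpt):

```rust
#[tokio::main]
async fn main() {
    // FOR-DEV-ONLY: recreate and seed the local database.
    lib_core::_dev_utils::init_dev().await;
    // ... build routes, start the web server, etc.
}
```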


@@ -0,0 +1,33 @@
use lib_utils::envs::get_env;
use std::sync::OnceLock;
pub fn core_config() -> &'static CoreConfig {
static INSTANCE: OnceLock<CoreConfig> = OnceLock::new();
INSTANCE.get_or_init(|| {
CoreConfig::load_from_env().unwrap_or_else(|ex| {
panic!("FATAL - WHILE LOADING CONF - Cause: {ex:?}")
})
})
}
#[allow(non_snake_case)]
pub struct CoreConfig {
// -- Db
pub DB_URL: String,
// -- Web
pub WEB_FOLDER: String,
}
impl CoreConfig {
fn load_from_env() -> lib_utils::envs::Result<CoreConfig> {
Ok(CoreConfig {
// -- Db
DB_URL: get_env("SERVICE_DB_URL")?,
// -- Web
WEB_FOLDER: get_env("SERVICE_WEB_FOLDER")?,
})
}
}


@@ -0,0 +1,21 @@
use serde::Serialize;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, Serialize)]
pub enum Error {
CtxCannotNewRootCtx,
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,34 @@
// region: --- Modules
mod error;
pub use self::error::{Error, Result};
// endregion: --- Modules
#[derive(Clone, Debug)]
pub struct Ctx {
user_id: i64,
}
// Constructors.
impl Ctx {
pub fn root_ctx() -> Self {
Ctx { user_id: 0 }
}
pub fn new(user_id: i64) -> Result<Self> {
if user_id == 0 {
Err(Error::CtxCannotNewRootCtx)
} else {
Ok(Self { user_id })
}
}
}
// Property Accessors.
impl Ctx {
pub fn user_id(&self) -> i64 {
self.user_id
}
}
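A usage sketch of the two constructors: `root_ctx` is reserved for system-level operations, while `new` guards against impersonating root:

```rust
use lib_core::ctx::Ctx;

fn ctx_demo() {
    // System-level context (e.g., dev seeding, background jobs).
    let root = Ctx::root_ctx();
    assert_eq!(root.user_id(), 0);

    // Per-request context, typically built after token validation.
    let user_ctx = Ctx::new(42).expect("non-root user_id");
    assert_eq!(user_ctx.user_id(), 42);

    // user_id 0 is rejected so root creation stays explicit.
    assert!(Ctx::new(0).is_err());
}
```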


@@ -0,0 +1,8 @@
pub mod config;
pub mod ctx;
pub mod model;
// #[cfg(test)] // Commented during early development.
pub mod _dev_utils;
use config::core_config;


@@ -0,0 +1,254 @@
use crate::ctx::Ctx;
use crate::model::ModelManager;
use crate::model::{Error, Result};
use lib_utils::time::now_utc;
use modql::field::{Field, Fields, HasFields};
use modql::filter::{FilterGroups, ListOptions};
use modql::SIden;
use sea_query::{
Condition, Expr, Iden, IntoIden, PostgresQueryBuilder, Query, TableRef,
};
use sea_query_binder::SqlxBinder;
use sqlx::postgres::PgRow;
use sqlx::FromRow;
const LIST_LIMIT_DEFAULT: i64 = 1000;
const LIST_LIMIT_MAX: i64 = 5000;
#[derive(Iden)]
pub enum CommonIden {
Id,
}
#[derive(Iden)]
pub enum TimestampIden {
Cid,
Ctime,
Mid,
Mtime,
}
pub trait DbBmc {
const TABLE: &'static str;
fn table_ref() -> TableRef {
TableRef::Table(SIden(Self::TABLE).into_iden())
}
}
pub fn compute_list_options(
list_options: Option<ListOptions>,
) -> Result<ListOptions> {
if let Some(mut list_options) = list_options {
// Validate the limit.
if let Some(limit) = list_options.limit {
if limit > LIST_LIMIT_MAX {
return Err(Error::ListLimitOverMax {
max: LIST_LIMIT_MAX,
actual: limit,
});
}
}
// Set the default limit if no limit
else {
list_options.limit = Some(LIST_LIMIT_DEFAULT);
}
Ok(list_options)
}
// When None, return default
else {
Ok(ListOptions {
limit: Some(LIST_LIMIT_DEFAULT),
offset: None,
order_bys: Some("id".into()),
})
}
}
pub async fn create<MC, E>(ctx: &Ctx, mm: &ModelManager, data: E) -> Result<i64>
where
MC: DbBmc,
E: HasFields,
{
let db = mm.db();
// -- Extract fields (name / sea-query value expression)
let mut fields = data.not_none_fields();
add_timestamps_for_create(&mut fields, ctx.user_id());
let (columns, sea_values) = fields.for_sea_insert();
// -- Build query
let mut query = Query::insert();
query
.into_table(MC::table_ref())
.columns(columns)
.values(sea_values)?
.returning(Query::returning().columns([CommonIden::Id]));
// -- Exec query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let (id,) = sqlx::query_as_with::<_, (i64,), _>(&sql, values)
.fetch_one(db)
.await?;
Ok(id)
}
pub async fn get<MC, E>(_ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<E>
where
MC: DbBmc,
E: for<'r> FromRow<'r, PgRow> + Unpin + Send,
E: HasFields,
{
let db = mm.db();
// -- Build query
let mut query = Query::select();
query
.from(MC::table_ref())
.columns(E::field_column_refs())
.and_where(Expr::col(CommonIden::Id).eq(id));
// -- Exec query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let entity = sqlx::query_as_with::<_, E, _>(&sql, values)
.fetch_optional(db)
.await?
.ok_or(Error::EntityNotFound {
entity: MC::TABLE,
id,
})?;
Ok(entity)
}
pub async fn list<MC, E, F>(
_ctx: &Ctx,
mm: &ModelManager,
filter: Option<F>,
list_options: Option<ListOptions>,
) -> Result<Vec<E>>
where
MC: DbBmc,
F: Into<FilterGroups>,
E: for<'r> FromRow<'r, PgRow> + Unpin + Send,
E: HasFields,
{
let db = mm.db();
// -- Build the query
let mut query = Query::select();
query.from(MC::table_ref()).columns(E::field_column_refs());
// condition from filter
if let Some(filter) = filter {
let filters: FilterGroups = filter.into();
let cond: Condition = filters.try_into()?;
query.cond_where(cond);
}
// list options
let list_options = compute_list_options(list_options)?;
list_options.apply_to_sea_query(&mut query);
// -- Execute the query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let entities = sqlx::query_as_with::<_, E, _>(&sql, values)
.fetch_all(db)
.await?;
Ok(entities)
}
pub async fn update<MC, E>(
ctx: &Ctx,
mm: &ModelManager,
id: i64,
data: E,
) -> Result<()>
where
MC: DbBmc,
E: HasFields,
{
let db = mm.db();
let mut fields = data.not_none_fields();
add_timestamps_for_update(&mut fields, ctx.user_id());
let fields = fields.for_sea_update();
// -- Build query
let mut query = Query::update();
query
.table(MC::table_ref())
.values(fields)
.and_where(Expr::col(CommonIden::Id).eq(id));
// -- Execute query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let count = sqlx::query_with(&sql, values)
.execute(db)
.await?
.rows_affected();
// -- Check result
if count == 0 {
Err(Error::EntityNotFound {
entity: MC::TABLE,
id,
})
} else {
Ok(())
}
}
pub async fn delete<MC>(_ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<()>
where
MC: DbBmc,
{
let db = mm.db();
// -- Build query
let mut query = Query::delete();
query
.from_table(MC::table_ref())
.and_where(Expr::col(CommonIden::Id).eq(id));
// -- Execute query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let count = sqlx::query_with(&sql, values)
.execute(db)
.await?
.rows_affected();
// -- Check result
if count == 0 {
Err(Error::EntityNotFound {
entity: MC::TABLE,
id,
})
} else {
Ok(())
}
}
// region: --- Utils
/// Set the timestamp info for create.
/// (i.e., cid, ctime and mid, mtime will be set to the same values)
pub fn add_timestamps_for_create(fields: &mut Fields, user_id: i64) {
let now = now_utc();
fields.push(Field::new(TimestampIden::Cid.into_iden(), user_id.into()));
fields.push(Field::new(TimestampIden::Ctime.into_iden(), now.into()));
fields.push(Field::new(TimestampIden::Mid.into_iden(), user_id.into()));
fields.push(Field::new(TimestampIden::Mtime.into_iden(), now.into()));
}
/// Update the timestamp info for update only.
/// (e.g., only mid and mtime will be updated)
pub fn add_timestamps_for_update(fields: &mut Fields, user_id: i64) {
let now = now_utc();
fields.push(Field::new(TimestampIden::Mid.into_iden(), user_id.into()));
fields.push(Field::new(TimestampIden::Mtime.into_iden(), now.into()));
}
// endregion: --- Utils


@@ -0,0 +1,47 @@
use crate::model::store;
use derive_more::From;
use lib_auth::pwd;
use serde::Serialize;
use serde_with::{serde_as, DisplayFromStr};
pub type Result<T> = core::result::Result<T, Error>;
#[serde_as]
#[derive(Debug, Serialize, From)]
pub enum Error {
EntityNotFound {
entity: &'static str,
id: i64,
},
ListLimitOverMax {
max: i64,
actual: i64,
},
// -- Modules
#[from]
Pwd(pwd::Error),
#[from]
Store(store::Error),
// -- Externals
#[from]
SeaQuery(#[serde_as(as = "DisplayFromStr")] sea_query::error::Error),
#[from]
Sqlx(#[serde_as(as = "DisplayFromStr")] sqlx::Error),
#[from]
ModqlIntoSea(#[serde_as(as = "DisplayFromStr")] modql::filter::IntoSeaError),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,54 @@
//! Model Layer
//!
//! Design:
//!
//! - The Model layer normalizes the application's data type
//! structures and access.
//! - All application code data access must go through the Model layer.
//! - The `ModelManager` holds the internal states/resources
//! needed by ModelControllers to access data.
//! (e.g., db_pool, S3 client, redis client).
//! - Model Controllers (e.g., `TaskBmc`, `ProjectBmc`) implement
//! CRUD and other data access methods on a given "entity"
//! (e.g., `Task`, `Project`).
//! (`Bmc` is short for Backend Model Controller).
//! - In frameworks like Axum or Tauri, the `ModelManager` is typically used as App State.
//! - The `ModelManager` is designed to be passed as an argument
//!   to all Model Controller functions.
//!
// region: --- Modules
mod base;
mod error;
pub mod modql_utils;
pub mod project;
mod store;
pub mod task;
pub mod user;
pub use self::error::{Error, Result};
use crate::model::store::{new_db_pool, Db};
// endregion: --- Modules
#[derive(Clone)]
pub struct ModelManager {
db: Db,
}
impl ModelManager {
/// Constructor
pub async fn new() -> Result<Self> {
let db = new_db_pool().await?;
Ok(ModelManager { db })
}
/// Returns the sqlx db pool reference.
/// (Only for the model layer)
pub(in crate::model) fn db(&self) -> &Db {
&self.db
}
}
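A sketch of the call pattern described in the design notes above: construct one `ModelManager` (typically shared as app state), then pass it, together with a `Ctx`, to every Bmc function (the `project_id: 1` is illustrative):

```rust
use lib_core::ctx::Ctx;
use lib_core::model::task::{TaskBmc, TaskForCreate};
use lib_core::model::ModelManager;

async fn model_demo() -> lib_core::model::Result<()> {
    let mm = ModelManager::new().await?; // Created once, cheap to clone.
    let ctx = Ctx::root_ctx();
    let id = TaskBmc::create(
        &ctx,
        &mm,
        TaskForCreate {
            project_id: 1,
            title: "demo task".to_string(),
        },
    )
    .await?;
    let _task = TaskBmc::get(&ctx, &mm, id).await?;
    Ok(())
}
```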


@@ -0,0 +1,7 @@
use time::serde::rfc3339;
pub fn time_to_sea_value(
json_value: serde_json::Value,
) -> modql::filter::SeaResult<sea_query::Value> {
Ok(rfc3339::deserialize(json_value)?.into())
}


@@ -0,0 +1,119 @@
use crate::ctx::Ctx;
use crate::model::base::{self, DbBmc};
use crate::model::modql_utils::time_to_sea_value;
use crate::model::ModelManager;
use crate::model::Result;
use lib_utils::time::Rfc3339;
use modql::field::Fields;
use modql::filter::{FilterNodes, OpValsString, OpValsValue};
use modql::filter::{ListOptions, OpValsInt64};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use sqlx::types::time::OffsetDateTime;
use sqlx::FromRow;
// region: --- Project Types
#[serde_as]
#[derive(Debug, Clone, Fields, FromRow, Serialize)]
pub struct Project {
pub id: i64,
pub owner_id: i64,
pub name: String,
// -- Timestamps
// (creator and last modified user_id/time)
pub cid: i64,
#[serde_as(as = "Rfc3339")]
pub ctime: OffsetDateTime,
pub mid: i64,
#[serde_as(as = "Rfc3339")]
pub mtime: OffsetDateTime,
}
#[derive(Fields, Deserialize)]
pub struct ProjectForCreate {
pub name: String,
}
#[derive(Fields, Deserialize)]
pub struct ProjectForUpdate {
pub name: Option<String>,
pub owner_id: Option<i64>,
}
/// `ProjectForCreateInner` contains all properties required
/// for a database insert.
/// NOTE: In this design, `project.owner_id` is intrinsic to the
///       create-project action and should not be exposed through the API.
///       It must be set from the user initiating the action (i.e., the `Ctx`),
///       which is also what access-control checks should reference.
///       Hence we differentiate between `ProjectForCreate` (the public data structure)
///       and `ProjectForCreateInner` (the data actually executed, i.e., inserted,
///       including db-required fields such as `owner_id`).
#[derive(Fields)]
struct ProjectForCreateInner {
pub name: String,
pub owner_id: i64,
}
#[derive(FilterNodes, Default, Deserialize)]
pub struct ProjectFilter {
id: Option<OpValsInt64>,
name: Option<OpValsString>,
cid: Option<OpValsInt64>,
#[modql(to_sea_value_fn = "time_to_sea_value")]
ctime: Option<OpValsValue>,
mid: Option<OpValsInt64>,
#[modql(to_sea_value_fn = "time_to_sea_value")]
mtime: Option<OpValsValue>,
}
// endregion: --- Project Types
// region: --- ProjectBmc
pub struct ProjectBmc;
impl DbBmc for ProjectBmc {
const TABLE: &'static str = "project";
}
impl ProjectBmc {
pub async fn create(
ctx: &Ctx,
mm: &ModelManager,
project_c: ProjectForCreate,
) -> Result<i64> {
let project_c = ProjectForCreateInner {
name: project_c.name,
owner_id: ctx.user_id(),
};
base::create::<Self, _>(ctx, mm, project_c).await
}
pub async fn get(ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<Project> {
base::get::<Self, _>(ctx, mm, id).await
}
pub async fn list(
ctx: &Ctx,
mm: &ModelManager,
filter: Option<Vec<ProjectFilter>>,
list_options: Option<ListOptions>,
) -> Result<Vec<Project>> {
base::list::<Self, _, _>(ctx, mm, filter, list_options).await
}
pub async fn update(
ctx: &Ctx,
mm: &ModelManager,
id: i64,
project_u: ProjectForUpdate,
) -> Result<()> {
base::update::<Self, _>(ctx, mm, id, project_u).await
}
pub async fn delete(ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<()> {
base::delete::<Self>(ctx, mm, id).await
}
}
// endregion: --- ProjectBmc
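The public/inner split documented above keeps `owner_id` out of the API surface. A sketch of the effect from a (hypothetical) handler's point of view:

```rust
use lib_core::ctx::Ctx;
use lib_core::model::project::{ProjectBmc, ProjectForCreate};
use lib_core::model::ModelManager;

// Hypothetical handler signature, for illustration only.
async fn create_project(
    ctx: &Ctx,
    mm: &ModelManager,
    payload_json: &str,
) -> lib_core::model::Result<i64> {
    // The payload can only carry `name`; a client cannot smuggle in `owner_id`.
    let project_c: ProjectForCreate =
        serde_json::from_str(payload_json).expect("valid ProjectForCreate json");
    // `owner_id` is set from the authenticated ctx inside ProjectBmc::create.
    ProjectBmc::create(ctx, mm, project_c).await
}
```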


@@ -0,0 +1,21 @@
use serde::Serialize;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, Serialize)]
pub enum Error {
FailToCreatePool(String),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate


@@ -0,0 +1,33 @@
// region: --- Modules
mod error;
pub use self::error::{Error, Result};
use crate::core_config;
use sqlx::postgres::PgPoolOptions;
use sqlx::{Pool, Postgres};
// endregion: --- Modules
pub type Db = Pool<Postgres>;
pub async fn new_db_pool() -> Result<Db> {
// * See NOTE 1) below
let max_connections = if cfg!(test) { 1 } else { 5 };
PgPoolOptions::new()
.max_connections(max_connections)
.connect(&core_config().DB_URL)
.await
.map_err(|ex| Error::FailToCreatePool(ex.to_string()))
}
// NOTE 1) This is not an ideal situation; however, with sqlx 0.7.1, when executing `cargo test`, some tests that use sqlx fail at a
// rather low level (in the tokio scheduler). It appears to be a low-level thread/async issue, as removing/adding
// tests causes different tests to fail. The cause remains uncertain, but setting max_connections to 1 resolves the issue.
// The good news is that max_connections functions normally for a regular run.
// This issue is likely due to the unique requirements unit tests impose on their execution, and therefore,
// while not ideal, it should serve as an acceptable temporary solution.
// It's a very challenging issue to investigate and narrow down. The alternative would have been to stick with sqlx 0.6.x, which
// is potentially less ideal and might lead to confusion as to why we are maintaining the older version in this blueprint.


@@ -0,0 +1,400 @@
use crate::ctx::Ctx;
use crate::model::base::{self, DbBmc};
use crate::model::modql_utils::time_to_sea_value;
use crate::model::ModelManager;
use crate::model::Result;
use lib_utils::time::Rfc3339;
use modql::field::Fields;
use modql::filter::{
FilterNodes, ListOptions, OpValsBool, OpValsInt64, OpValsString, OpValsValue,
};
use serde::{Deserialize, Serialize};
use serde_with::serde_as;
use sqlx::types::time::OffsetDateTime;
use sqlx::FromRow;
// region: --- Task Types
#[serde_as]
#[derive(Debug, Clone, Fields, FromRow, Serialize)]
pub struct Task {
pub id: i64,
pub project_id: i64,
pub title: String,
pub done: bool,
// -- Timestamps
// (creator and last modified user_id/time)
pub cid: i64,
#[serde_as(as = "Rfc3339")]
pub ctime: OffsetDateTime,
pub mid: i64,
#[serde_as(as = "Rfc3339")]
pub mtime: OffsetDateTime,
}
#[derive(Fields, Deserialize)]
pub struct TaskForCreate {
pub title: String,
pub project_id: i64,
}
#[derive(Fields, Deserialize, Default)]
pub struct TaskForUpdate {
pub title: Option<String>,
pub done: Option<bool>,
}
#[derive(FilterNodes, Deserialize, Default, Debug)]
pub struct TaskFilter {
id: Option<OpValsInt64>,
project_id: Option<OpValsInt64>,
title: Option<OpValsString>,
done: Option<OpValsBool>,
cid: Option<OpValsInt64>,
#[modql(to_sea_value_fn = "time_to_sea_value")]
ctime: Option<OpValsValue>,
mid: Option<OpValsInt64>,
#[modql(to_sea_value_fn = "time_to_sea_value")]
mtime: Option<OpValsValue>,
}
// endregion: --- Task Types
// region: --- TaskBmc
pub struct TaskBmc;
impl DbBmc for TaskBmc {
const TABLE: &'static str = "task";
}
impl TaskBmc {
pub async fn create(
ctx: &Ctx,
mm: &ModelManager,
task_c: TaskForCreate,
) -> Result<i64> {
base::create::<Self, _>(ctx, mm, task_c).await
}
pub async fn get(ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<Task> {
base::get::<Self, _>(ctx, mm, id).await
}
pub async fn list(
ctx: &Ctx,
mm: &ModelManager,
filter: Option<Vec<TaskFilter>>,
list_options: Option<ListOptions>,
) -> Result<Vec<Task>> {
base::list::<Self, _, _>(ctx, mm, filter, list_options).await
}
pub async fn update(
ctx: &Ctx,
mm: &ModelManager,
id: i64,
task_u: TaskForUpdate,
) -> Result<()> {
base::update::<Self, _>(ctx, mm, id, task_u).await
}
pub async fn delete(ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<()> {
base::delete::<Self>(ctx, mm, id).await
}
}
// endregion: --- TaskBmc
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use crate::_dev_utils;
use crate::model::project::ProjectBmc;
use crate::model::Error;
use anyhow::Result;
use lib_utils::time::{format_time, now_utc};
use modql::filter::OpValString;
use serde_json::json;
use serial_test::serial;
use std::time::Duration;
use tokio::time::sleep;
#[serial]
#[tokio::test]
async fn test_create_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_title = "test_create_ok title";
let fx_project_id =
_dev_utils::seed_project(&ctx, &mm, "test_create_ok project for task ")
.await?;
// -- Exec
let task_c = TaskForCreate {
project_id: fx_project_id,
title: fx_title.to_string(),
};
let id = TaskBmc::create(&ctx, &mm, task_c).await?;
// -- Check
let task = TaskBmc::get(&ctx, &mm, id).await?;
assert_eq!(task.title, fx_title);
// -- Clean
TaskBmc::delete(&ctx, &mm, id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_get_err_not_found() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_id = 100;
// -- Exec
let res = TaskBmc::get(&ctx, &mm, fx_id).await;
// -- Check
assert!(
matches!(
res,
Err(Error::EntityNotFound {
entity: "task",
id: 100
})
),
"EntityNotFound not matching"
);
Ok(())
}
#[serial]
#[tokio::test]
async fn test_list_all_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_titles = &["test_list_all_ok-task 01", "test_list_all_ok-task 02"];
let fx_project_id =
_dev_utils::seed_project(&ctx, &mm, "test_list_all_ok project for task")
.await?;
_dev_utils::seed_tasks(&ctx, &mm, fx_project_id, fx_titles).await?;
// -- Exec
let filter = TaskFilter {
project_id: Some(fx_project_id.into()),
..Default::default()
};
let tasks = TaskBmc::list(&ctx, &mm, Some(vec![filter]), None).await?;
// -- Check
assert_eq!(tasks.len(), 2, "number of seeded tasks.");
// -- Clean
ProjectBmc::delete(&ctx, &mm, fx_project_id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_list_by_title_contains_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_titles = &[
"test_list_by_title_contains_ok 01",
"test_list_by_title_contains_ok 02.1",
"test_list_by_title_contains_ok 02.2",
];
let fx_project_id = _dev_utils::seed_project(
&ctx,
&mm,
"test_list_by_title_contains_ok project for task ",
)
.await?;
_dev_utils::seed_tasks(&ctx, &mm, fx_project_id, fx_titles).await?;
// -- Exec
let filter = TaskFilter {
project_id: Some(fx_project_id.into()),
title: Some(
OpValString::Contains("by_title_contains_ok 02".to_string()).into(),
),
..Default::default()
};
let tasks = TaskBmc::list(&ctx, &mm, Some(vec![filter]), None).await?;
// -- Check
assert_eq!(tasks.len(), 2);
// -- Cleanup
ProjectBmc::delete(&ctx, &mm, fx_project_id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_list_with_list_options_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_titles = &[
"test_list_with_list_options_ok 01",
"test_list_with_list_options_ok 02.1",
"test_list_with_list_options_ok 02.2",
];
let fx_project_id = _dev_utils::seed_project(
&ctx,
&mm,
"test_list_with_list_options_ok project for task ",
)
.await?;
_dev_utils::seed_tasks(&ctx, &mm, fx_project_id, fx_titles).await?;
// -- Exec
let filter: TaskFilter = TaskFilter {
project_id: Some(fx_project_id.into()),
..Default::default()
};
let list_options: ListOptions = serde_json::from_value(json!({
"offset": 0,
"limit": 2,
"order_bys": "!title"
}))?;
let tasks =
TaskBmc::list(&ctx, &mm, Some(vec![filter]), Some(list_options)).await?;
// -- Check
let titles: Vec<String> =
tasks.iter().map(|t| t.title.to_string()).collect();
assert_eq!(titles.len(), 2);
assert_eq!(
&titles,
&[
"test_list_with_list_options_ok 02.2",
"test_list_with_list_options_ok 02.1"
]
);
// -- Cleanup
// Will delete associated tasks
ProjectBmc::delete(&ctx, &mm, fx_project_id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_update_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_title = "test_update_ok - task 01";
let fx_title_new = "test_update_ok - task 01 - new";
let fx_project_id =
_dev_utils::seed_project(&ctx, &mm, "test_update_ok project for task")
.await?;
let fx_task = _dev_utils::seed_tasks(&ctx, &mm, fx_project_id, &[fx_title])
.await?
.remove(0);
// -- Exec
TaskBmc::update(
&ctx,
&mm,
fx_task.id,
TaskForUpdate {
title: Some(fx_title_new.to_string()),
..Default::default()
},
)
.await?;
// -- Check
let task = TaskBmc::get(&ctx, &mm, fx_task.id).await?;
assert_eq!(task.title, fx_title_new);
// -- Clean
ProjectBmc::delete(&ctx, &mm, fx_project_id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_list_by_ctime_ok() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_project_id = _dev_utils::seed_project(
&ctx,
&mm,
"project for tasks test_list_by_ctime_ok",
)
.await?;
let fx_titles_01 = &[
"test_list_by_ctime_ok 01.1",
"test_list_by_ctime_ok 01.2",
"test_list_by_ctime_ok 01.3",
];
_dev_utils::seed_tasks(&ctx, &mm, fx_project_id, fx_titles_01).await?;
let time_marker = format_time(now_utc());
sleep(Duration::from_millis(300)).await;
let fx_titles_02 =
&["test_list_by_ctime_ok 02.1", "test_list_by_ctime_ok 02.2"];
_dev_utils::seed_tasks(&ctx, &mm, fx_project_id, fx_titles_02).await?;
// -- Exec
let filter_json = json!({
"ctime": {"$gt": time_marker}, // time in Rfc3339
});
let filter = vec![serde_json::from_value(filter_json)?];
let tasks = TaskBmc::list(&ctx, &mm, Some(filter), None).await?;
// -- Check
let titles: Vec<String> = tasks.into_iter().map(|t| t.title).collect();
assert_eq!(titles.len(), 2);
assert_eq!(&titles, fx_titles_02);
// -- Cleanup
ProjectBmc::delete(&ctx, &mm, fx_project_id).await?;
Ok(())
}
#[serial]
#[tokio::test]
async fn test_delete_err_not_found() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_id = 100;
// -- Exec
let res = TaskBmc::delete(&ctx, &mm, fx_id).await;
// -- Check
assert!(
matches!(
res,
Err(Error::EntityNotFound {
entity: "task",
id: 100
})
),
"EntityNotFound not matching"
);
Ok(())
}
}
// endregion: --- Tests

View File

@@ -0,0 +1,178 @@
use crate::ctx::Ctx;
use crate::model::base::{self, add_timestamps_for_update, DbBmc};
use crate::model::ModelManager;
use crate::model::Result;
use lib_auth::pwd::{self, ContentToHash};
use modql::field::{Field, Fields, HasFields};
use sea_query::{Expr, Iden, PostgresQueryBuilder, Query};
use sea_query_binder::SqlxBinder;
use serde::{Deserialize, Serialize};
use sqlx::postgres::PgRow;
use sqlx::FromRow;
use uuid::Uuid;
// region: --- User Types
#[derive(Clone, Fields, FromRow, Debug, Serialize)]
pub struct User {
pub id: i64,
pub username: String,
}
#[derive(Deserialize)]
pub struct UserForCreate {
pub username: String,
pub pwd_clear: String,
}
#[derive(Fields)]
pub struct UserForInsert {
pub username: String,
}
#[derive(Clone, FromRow, Fields, Debug)]
pub struct UserForLogin {
pub id: i64,
pub username: String,
// -- pwd and token info
pub pwd: Option<String>, // encrypted, #_scheme_id_#....
pub pwd_salt: Uuid,
pub token_salt: Uuid,
}
#[derive(Clone, FromRow, Fields, Debug)]
pub struct UserForAuth {
pub id: i64,
pub username: String,
// -- token info
pub token_salt: Uuid,
}
/// Marker trait
pub trait UserBy: HasFields + for<'r> FromRow<'r, PgRow> + Unpin + Send {}
impl UserBy for User {}
impl UserBy for UserForLogin {}
impl UserBy for UserForAuth {}
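// Illustrative usage sketch (not part of the original file): because the Bmc
// functions below are generic over `UserBy`, the caller picks the projection
// at the call site, and only that struct's columns are selected:
//
//     let user: User = UserBmc::get(&ctx, &mm, user_id).await?;
//     let user_for_login: UserForLogin = UserBmc::get(&ctx, &mm, user_id).await?;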
// Note: Since the entity property Idens are provided by modql,
// UserIden does not have to be exhaustive; it only needs the columns
// we use in our specific code.
#[derive(Iden)]
enum UserIden {
Id,
Username,
Pwd,
}
// endregion: --- User Types
// region: --- UserBmc
pub struct UserBmc;
impl DbBmc for UserBmc {
const TABLE: &'static str = "user";
}
impl UserBmc {
pub async fn get<E>(ctx: &Ctx, mm: &ModelManager, id: i64) -> Result<E>
where
E: UserBy,
{
base::get::<Self, _>(ctx, mm, id).await
}
pub async fn first_by_username<E>(
_ctx: &Ctx,
mm: &ModelManager,
username: &str,
) -> Result<Option<E>>
where
E: UserBy,
{
let db = mm.db();
// -- Build query
let mut query = Query::select();
query
.from(Self::table_ref())
.columns(E::field_idens())
.and_where(Expr::col(UserIden::Username).eq(username));
// -- Execute query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let entity = sqlx::query_as_with::<_, E, _>(&sql, values)
.fetch_optional(db)
.await?;
Ok(entity)
}
pub async fn update_pwd(
ctx: &Ctx,
mm: &ModelManager,
id: i64,
pwd_clear: &str,
) -> Result<()> {
let db = mm.db();
// -- Prep password
let user: UserForLogin = Self::get(ctx, mm, id).await?;
let pwd = pwd::hash_pwd(&ContentToHash {
content: pwd_clear.to_string(),
salt: user.pwd_salt,
})?;
// -- Prep the data
let mut fields = Fields::new(vec![Field::new(UserIden::Pwd, pwd.into())]);
add_timestamps_for_update(&mut fields, ctx.user_id());
// -- Build query
let fields = fields.for_sea_update();
let mut query = Query::update();
query
.table(Self::table_ref())
.values(fields)
.and_where(Expr::col(UserIden::Id).eq(id));
// -- Exec query
let (sql, values) = query.build_sqlx(PostgresQueryBuilder);
let _count = sqlx::query_with(&sql, values)
.execute(db)
.await?
.rows_affected();
Ok(())
}
}
// endregion: --- UserBmc
// region: --- Tests
#[cfg(test)]
mod tests {
use super::*;
use crate::_dev_utils;
use anyhow::{Context, Result};
use serial_test::serial;
#[serial]
#[tokio::test]
async fn test_first_ok_demo1() -> Result<()> {
// -- Setup & Fixtures
let mm = _dev_utils::init_test().await;
let ctx = Ctx::root_ctx();
let fx_username = "demo1";
// -- Exec
let user: User = UserBmc::first_by_username(&ctx, &mm, fx_username)
.await?
.context("Should have user 'demo1'")?;
// -- Check
assert_eq!(user.username, fx_username);
Ok(())
}
}
// endregion: --- Tests

View File

@@ -0,0 +1,25 @@
[package]
name = "lib-rpc"
version = "0.1.0"
edition = "2021"
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
# -- App Libs
lib-core = { path = "../../libs/lib-core"}
# -- Async
tokio = { version = "1", features = ["full"] }
futures = "0.3"
# -- Json
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"
# -- Data
modql = {version = "0.3.4", features = ["with-sea-query"]}
# -- Others
derive_more = {version = "1.0.0-beta", features = ["from"] }

View File

@@ -0,0 +1,36 @@
use derive_more::From;
use serde::Serialize;
use serde_with::{serde_as, DisplayFromStr};
pub type Result<T> = core::result::Result<T, Error>;
#[serde_as]
#[derive(Debug, From, Serialize)]
pub enum Error {
MissingCtx,
// -- RPC Router
RpcMethodUnknown(String),
RpcIntoParamsMissing,
// -- Modules
#[from]
Model(lib_core::model::Error),
// -- External Modules
#[from]
SerdeJson(#[serde_as(as = "DisplayFromStr")] serde_json::Error),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate

View File

@@ -0,0 +1,17 @@
// region: --- Modules
mod error;
mod params;
mod resources;
mod rpcs;
pub mod router;
pub use self::error::{Error, Result};
pub use params::*;
pub use resources::RpcResources;
pub use router::RpcRequest;
pub use rpcs::*;
// endregion: --- Modules

View File

@@ -0,0 +1,83 @@
//! Base constructs for the typed RPC Params that will be used in their respective
//! rpc handler functions (e.g., `project_rpc::create_project` and `project_rpc::list_projects`).
//!
//! Most of these base constructs use generics for their respective data elements, allowing
//! each rpc handler function to receive the exact desired type.
//!
//! `IntoParams` or `IntoDefaultParams` are implemented to ensure these Params conform to the
//! `RpcRouter` (i.e., `rpc::router`) model.
use crate::router::{IntoDefaultParams, IntoParams};
use crate::Result;
use modql::filter::ListOptions;
use serde::de::DeserializeOwned;
use serde::Deserialize;
use serde_json::Value;
use serde_with::{serde_as, OneOrMany};
/// Params structure for any RPC Create call.
#[derive(Deserialize)]
pub struct ParamsForCreate<D> {
pub data: D,
}
impl<D> IntoParams for ParamsForCreate<D> where D: DeserializeOwned + Send {}
/// Params structure for any RPC Update call.
#[derive(Deserialize)]
pub struct ParamsForUpdate<D> {
pub id: i64,
pub data: D,
}
impl<D> IntoParams for ParamsForUpdate<D> where D: DeserializeOwned + Send {}
/// Params structure for any RPC call that takes only an entity `id` (e.g., get, delete).
#[derive(Deserialize)]
pub struct ParamsIded {
pub id: i64,
}
impl IntoParams for ParamsIded {}
/// Params structure for any RPC List call.
#[serde_as]
#[derive(Deserialize, Default)]
pub struct ParamsList<F>
where
F: DeserializeOwned,
{
#[serde_as(deserialize_as = "Option<OneOrMany<_>>")]
pub filters: Option<Vec<F>>,
pub list_options: Option<ListOptions>,
}
impl<D> IntoDefaultParams for ParamsList<D> where D: DeserializeOwned + Send + Default
{}
// region: --- General Implementations
/// Implements `IntoParams` for `Option<D>` where `D` itself implements `IntoParams`.
///
/// Note: Application code might prefer to avoid this blanket implementation
/// and opt for enabling it on a per-type basis instead. If that's the case,
/// simply remove this general implementation and provide specific
/// implementations for each type.
impl<D> IntoParams for Option<D>
where
D: DeserializeOwned + Send,
D: IntoParams,
{
fn into_params(value: Option<Value>) -> Result<Self> {
let value = value.map(|v| serde_json::from_value(v)).transpose()?;
Ok(value)
}
}
/// This is the `IntoParams` implementation for the serde_json `Value` type.
///
/// Note: As above, this might not be a capability the application code wants to
/// allow for rpc handlers, preferring to have everything strongly typed.
/// In that case, just remove this implementation.
impl IntoParams for Value {}
// endregion: --- General Implementations
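// Illustrative JSON shapes (a sketch, not part of the original file) that the
// params above deserialize from; `TaskForCreate`/`TaskFilter` stand in for any `D`/`F`:
//
//     {"data": {"project_id": 1, "title": "t"}}        -> ParamsForCreate<TaskForCreate>
//     {"id": 42, "data": {"done": true}}               -> ParamsForUpdate<TaskForUpdate>
//     {"id": 42}                                       -> ParamsIded
//     {"filters": {"done": false}}                     -> ParamsList<TaskFilter>
//     {"filters": [{...}, {...}], "list_options": {}}  -> ParamsList<TaskFilter>
//     (`OneOrMany` lets `filters` be a single object or an array of them.)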

View File

@@ -0,0 +1,27 @@
use crate::router::FromResources;
use crate::{Error, Result};
use lib_core::ctx::Ctx;
use lib_core::model::ModelManager;
pub struct RpcResources {
pub mm: ModelManager,
pub ctx: Option<Ctx>,
}
impl FromResources for Ctx {
fn from_resources(rpc_resources: &RpcResources) -> Result<Self> {
rpc_resources.ctx.as_ref().cloned().ok_or(Error::MissingCtx)
}
}
impl FromResources for Option<Ctx> {
fn from_resources(rpc_resources: &RpcResources) -> Result<Self> {
Ok(rpc_resources.ctx.as_ref().cloned())
}
}
impl FromResources for ModelManager {
fn from_resources(rpc_resources: &RpcResources) -> Result<Self> {
Ok(rpc_resources.mm.clone())
}
}

View File

@@ -0,0 +1,8 @@
use crate::router::Result;
use crate::RpcResources;
pub trait FromResources {
fn from_resources(rpc_resources: &RpcResources) -> Result<Self>
where
Self: Sized;
}

View File

@@ -0,0 +1,33 @@
use crate::router::{Error, Result};
use serde::de::DeserializeOwned;
use serde_json::Value;
/// `IntoParams` allows for converting an `Option<Value>` into
/// the necessary type for RPC handler parameters.
/// The default implementation below will result in failure if the value is `None`.
/// For customized behavior, users can implement their own `into_params`
/// method.
pub trait IntoParams: DeserializeOwned + Send {
fn into_params(value: Option<Value>) -> Result<Self> {
match value {
Some(value) => Ok(serde_json::from_value(value)?),
None => Err(Error::RpcIntoParamsMissing),
}
}
}
/// Marker trait with a blanket implementation that returns `T::default()`
/// if the `params: Option<Value>` is `None`.
pub trait IntoDefaultParams: DeserializeOwned + Send + Default {}
impl<P> IntoParams for P
where
P: IntoDefaultParams,
{
fn into_params(value: Option<Value>) -> Result<Self> {
match value {
Some(value) => Ok(serde_json::from_value(value)?),
None => Ok(Self::default()),
}
}
}
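// Illustrative sketch (assumption, not part of the original file): a params
// type that falls back to its `Default` when the rpc request omits `params`.
//
//     #[derive(serde::Deserialize, Default)]
//     struct ParamsPing {
//         msg: Option<String>,
//     }
//     impl IntoDefaultParams for ParamsPing {}
//     // `ParamsPing::into_params(None)` then yields `Ok(ParamsPing::default())`.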

View File

@@ -0,0 +1,165 @@
//! rpc::router module provides the type and implementation for
//! json rpc routing.
//!
//! It has the following constructs:
//!
//! - `RpcRouter` holds the HashMap of `method_name: Box<dyn RpcHandlerWrapperTrait>`.
//! - `RpcHandler` trait is implemented for any async function that, with
//!    `(S1, S2, ...[impl IntoParams])`, returns `Result<impl Serialize>` where S1, S2, ... are
//!    types that implement `FromResources` (see router/from_resources.rs and src/resources.rs).
//! - `IntoParams` is the trait to implement to instruct how to go from `Option<Value>` json-rpc params
//! to the handler's param types.
//! - `IntoParams` has a default `into_params` implementation that will return an error if the params are missing.
//!
//! ```
//! #[derive(Deserialize)]
//! pub struct ParamsIded {
//! id: i64,
//! }
//!
//! impl IntoParams for ParamsIded {}
//! ```
//!
//! - For custom `IntoParams` behavior, implement the `IntoParams::into_params` function.
//! - Implementing `IntoDefaultParams` on a type that implements `Default` will auto-implement `IntoParams`
//! and call `T::default()` when the params `Option<Value>` is None.
//!
// region: --- Modules
mod from_resources;
mod into_params;
mod rpc_handler;
mod rpc_handler_wrapper;
pub use from_resources::FromResources;
pub use into_params::{IntoDefaultParams, IntoParams};
pub use rpc_handler::RpcHandler;
pub use rpc_handler_wrapper::{RpcHandlerWrapper, RpcHandlerWrapperTrait};
use crate::RpcResources;
use crate::{Error, Result};
use futures::Future;
use serde::Deserialize;
use serde_json::Value;
use std::collections::HashMap;
use std::pin::Pin;
// endregion: --- Modules
/// The raw JSON-RPC request object, serving as the foundation for RPC routing.
#[derive(Deserialize)]
pub struct RpcRequest {
pub id: Option<Value>,
pub method: String,
pub params: Option<Value>,
}
pub type PinFutureValue = Pin<Box<dyn Future<Output = Result<Value>> + Send>>;
/// `RpcRouter` holds the method-name-to-handler routes and exposes a `call`
/// method, which calls the appropriate handler matching the method_name.
///
/// RpcRouter can be extended with other RpcRouters for composability.
pub struct RpcRouter {
route_by_name: HashMap<&'static str, Box<dyn RpcHandlerWrapperTrait>>,
}
impl RpcRouter {
#[allow(clippy::new_without_default)] // Personal preference (for this case)
pub fn new() -> Self {
Self {
route_by_name: HashMap::new(),
}
}
/// Add a dyn_handler to the router.
///
/// ```
/// RpcRouter::new().add_dyn("method_name", my_handler_fn.into_dyn());
/// ```
///
/// Note: This is the preferred way to add handlers to the router, as it
/// avoids monomorphization of the add function.
/// The RpcRouter also has a `.add()` as a convenience function to just pass the function.
/// See `RpcRouter::add` for more details.
pub fn add_dyn(
mut self,
name: &'static str,
dyn_handler: Box<dyn RpcHandlerWrapperTrait>,
) -> Self {
self.route_by_name.insert(name, dyn_handler);
self
}
/// Add a handler function to the router.
///
/// ```
/// RpcRouter::new().add("method_name", my_handler_fn);
/// ```
///
/// Note: This is a convenient add function variant with generics,
/// and there will be monomorphed versions of this function
/// for each type passed. Use `RpcRouter::add_dyn` to avoid this.
pub fn add<F, T, P, R>(self, name: &'static str, handler: F) -> Self
where
F: RpcHandler<T, P, R> + Clone + Send + Sync + 'static,
T: Send + Sync + 'static,
P: Send + Sync + 'static,
R: Send + Sync + 'static,
{
self.add_dyn(name, handler.into_dyn())
}
pub fn extend(mut self, other_router: RpcRouter) -> Self {
self.route_by_name.extend(other_router.route_by_name);
self
}
pub async fn call(
&self,
method: &str,
rpc_resources: RpcResources,
params: Option<Value>,
) -> Result<Value> {
if let Some(route) = self.route_by_name.get(method) {
route.call(rpc_resources, params).await
} else {
Err(Error::RpcMethodUnknown(method.to_string()))
}
}
}
/// A simple macro to create a new RpcRouter
/// and add each rpc handler-compatible function along with their corresponding names.
///
/// e.g.,
///
/// ```
/// rpc_router!(
/// create_project,
/// list_projects,
/// update_project,
/// delete_project
/// );
/// ```
/// Is equivalent to:
/// ```
/// RpcRouter::new()
/// .add_dyn("create_project", create_project.into_box())
/// .add_dyn("list_projects", list_projects.into_box())
/// .add_dyn("update_project", update_project.into_box())
/// .add_dyn("delete_project", delete_project.into_box())
/// ```
#[macro_export]
macro_rules! rpc_router {
($($fn_name:ident),+ $(,)?) => {
{
use $crate::router::{RpcHandler, RpcRouter};
let mut router = RpcRouter::new();
$(
router = router.add_dyn(stringify!($fn_name), $fn_name.into_dyn());
)+
router
}
};
}
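// Illustrative sketch (assumption, not part of the original file): declaring a
// handler and dispatching a call through the router. `get_double` is a
// hypothetical handler; any async fn taking `impl FromResources` arguments
// plus an optional trailing `impl IntoParams`, returning `Result<impl Serialize>`,
// qualifies.
//
//     async fn get_double(_mm: ModelManager, params: ParamsIded) -> Result<i64> {
//         Ok(params.id * 2)
//     }
//
//     let router = rpc_router!(get_double);
//     let value = router
//         .call("get_double", rpc_resources, Some(json!({"id": 21})))
//         .await?; // -> json!(42)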

View File

@@ -0,0 +1,120 @@
use crate::router::into_params::IntoParams;
use crate::router::FromResources;
use crate::router::PinFutureValue;
use crate::router::Result;
use crate::router::RpcHandlerWrapper;
use crate::router::RpcHandlerWrapperTrait;
use crate::RpcResources;
use futures::Future;
use serde::Serialize;
use serde_json::Value;
/// The `Handler` trait that will be implemented by rpc handler functions.
///
/// Key points:
/// - Rpc handler functions are asynchronous, thus returning a Future of Result<Value>.
/// - The call format is normalized to zero or more `impl FromResources` arguments (up to five for now) and one optional `impl IntoParams`, which represents the json-rpc request's optional params value.
/// - `into_dyn` is a convenient method for converting a RpcHandler into a Boxed dyn RpcHandlerWrapperTrait,
///   allowing for dynamic dispatch by the Router.
/// - A `RpcHandler` will typically be implemented for static functions, as `FnOnce`,
///   enabling them to be cloned with no or negligible performance impact,
///   thus facilitating the use of RpcRouter dynamic dispatch.
/// - `T` is the tuple of `impl FromResources` arguments.
/// - `P` is the `impl IntoParams` argument.
///
pub trait RpcHandler<T, P, R>: Clone
where
T: Send + Sync + 'static,
P: Send + Sync + 'static,
R: Send + Sync + 'static,
{
/// The type of future calling this handler returns.
type Future: Future<Output = Result<Value>> + Send + 'static;
/// Call the handler.
fn call(
self,
rpc_resources: RpcResources,
params: Option<Value>,
) -> Self::Future;
/// Convert this RpcHandler into a Boxed dyn RpcHandlerWrapperTrait,
/// for dynamic dispatch by the Router.
fn into_dyn(self) -> Box<dyn RpcHandlerWrapperTrait>
where
Self: Sized + Send + Sync + 'static,
{
Box::new(RpcHandlerWrapper::new(self)) as Box<dyn RpcHandlerWrapperTrait>
}
}
/// Macro generating a pair of RpcHandler implementations: one for zero or more FromResources arguments with a trailing IntoParams argument,
/// and one without the trailing IntoParams argument.
macro_rules! impl_rpc_handler_pair {
($($T:ident),*) => {
// RpcHandler implementations for zero or more FromResources with the last argument being IntoParams
impl<F, Fut, $($T,)* P, R> RpcHandler<($($T,)*), (P,), R> for F
where
F: FnOnce($($T,)* P) -> Fut + Clone + Send + 'static,
$( $T: FromResources + Send + Sync + 'static, )*
P: IntoParams + Send + Sync + 'static,
R: Serialize + Send + Sync + 'static,
Fut: Future<Output = Result<R>> + Send,
{
type Future = PinFutureValue;
#[allow(unused)] // somehow rpc_resources will be marked as unused
fn call(
self,
rpc_resources: RpcResources,
params_value: Option<Value>,
) -> Self::Future {
Box::pin(async move {
let param = P::into_params(params_value)?;
let result = self(
$( $T::from_resources(&rpc_resources)?, )*
param,
)
.await?;
Ok(serde_json::to_value(result)?)
})
}
}
// RpcHandler implementations for zero or more FromResources and NO IntoParams
impl<F, Fut, $($T,)* R> RpcHandler<($($T,)*), (), R> for F
where
F: FnOnce($($T,)*) -> Fut + Clone + Send + 'static,
$( $T: FromResources + Send + Sync + 'static, )*
R: Serialize + Send + Sync + 'static,
Fut: Future<Output = Result<R>> + Send,
{
type Future = PinFutureValue;
#[allow(unused)] // somehow rpc_resources will be marked as unused
fn call(
self,
rpc_resources: RpcResources,
_params: Option<Value>,
) -> Self::Future {
Box::pin(async move {
let result = self(
$( $T::from_resources(&rpc_resources)?, )*
)
.await?;
Ok(serde_json::to_value(result)?)
})
}
}
};
}
impl_rpc_handler_pair!();
impl_rpc_handler_pair!(T1);
impl_rpc_handler_pair!(T1, T2);
impl_rpc_handler_pair!(T1, T2, T3);
impl_rpc_handler_pair!(T1, T2, T3, T4);
impl_rpc_handler_pair!(T1, T2, T3, T4, T5);
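// Illustrative note (not part of the original file): the invocations above
// generate `RpcHandler` impls for handler shapes such as (bounds elided):
//
//     async fn h(p: P) -> Result<R>              // zero resources, with params
//     async fn h(t1: T1, p: P) -> Result<R>      // one resource, with params
//     async fn h(t1: T1) -> Result<R>            // one resource, no params
//     // ... and so on, up to five `FromResources` arguments.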

View File

@@ -0,0 +1,71 @@
use crate::router::Result;
use crate::router::{PinFutureValue, RpcHandler};
use crate::RpcResources;
use futures::Future;
use serde_json::Value;
use std::marker::PhantomData;
use std::pin::Pin;
/// `RpcHandlerWrapper` is a `RpcHandler` wrapper which implements
/// `RpcHandlerWrapperTrait` for type erasure, enabling dynamic dispatch.
#[derive(Clone)]
pub struct RpcHandlerWrapper<H, T, P, R> {
handler: H,
_marker: PhantomData<(T, P, R)>,
}
// Constructor
impl<H, T, P, R> RpcHandlerWrapper<H, T, P, R> {
pub fn new(handler: H) -> Self {
Self {
handler,
_marker: PhantomData,
}
}
}
// Call Impl
impl<H, T, P, R> RpcHandlerWrapper<H, T, P, R>
where
H: RpcHandler<T, P, R> + Send + Sync + 'static,
T: Send + Sync + 'static,
P: Send + Sync + 'static,
R: Send + Sync + 'static,
{
pub fn call(
&self,
rpc_resources: RpcResources,
params: Option<Value>,
) -> H::Future {
// Note: Since handler is a FnOnce, we can use it only once, so we clone it.
// This is likely optimized by the compiler.
let handler = self.handler.clone();
RpcHandler::call(handler, rpc_resources, params)
}
}
/// `RpcHandlerWrapperTrait` enables `RpcHandlerWrapper` to become a trait object,
/// allowing for dynamic dispatch.
pub trait RpcHandlerWrapperTrait: Send + Sync {
fn call(
&self,
rpc_resources: RpcResources,
params: Option<Value>,
) -> PinFutureValue;
}
impl<H, T, P, R> RpcHandlerWrapperTrait for RpcHandlerWrapper<H, T, P, R>
where
H: RpcHandler<T, P, R> + Clone + Send + Sync + 'static,
T: Send + Sync + 'static,
P: Send + Sync + 'static,
R: Send + Sync + 'static,
{
fn call(
&self,
rpc_resources: RpcResources,
params: Option<Value>,
) -> Pin<Box<dyn Future<Output = Result<Value>> + Send>> {
Box::pin(self.call(rpc_resources, params))
}
}

View File

@@ -0,0 +1,2 @@
pub mod project_rpc;
pub mod task_rpc;

View File

@@ -0,0 +1,70 @@
use crate::router::RpcRouter;
use crate::rpc_router;
use crate::Result;
use crate::{ParamsForCreate, ParamsForUpdate, ParamsIded, ParamsList};
use lib_core::ctx::Ctx;
use lib_core::model::project::{
Project, ProjectBmc, ProjectFilter, ProjectForCreate, ProjectForUpdate,
};
use lib_core::model::ModelManager;
pub fn rpc_router() -> RpcRouter {
rpc_router!(
// Same as RpcRouter::new().add...
create_project,
list_projects,
update_project,
delete_project,
)
}
pub async fn create_project(
ctx: Ctx,
mm: ModelManager,
params: ParamsForCreate<ProjectForCreate>,
) -> Result<Project> {
let ParamsForCreate { data } = params;
let id = ProjectBmc::create(&ctx, &mm, data).await?;
let project = ProjectBmc::get(&ctx, &mm, id).await?;
Ok(project)
}
pub async fn list_projects(
ctx: Ctx,
mm: ModelManager,
params: ParamsList<ProjectFilter>,
) -> Result<Vec<Project>> {
let projects =
ProjectBmc::list(&ctx, &mm, params.filters, params.list_options).await?;
Ok(projects)
}
pub async fn update_project(
ctx: Ctx,
mm: ModelManager,
params: ParamsForUpdate<ProjectForUpdate>,
) -> Result<Project> {
let ParamsForUpdate { id, data } = params;
ProjectBmc::update(&ctx, &mm, id, data).await?;
let project = ProjectBmc::get(&ctx, &mm, id).await?;
Ok(project)
}
pub async fn delete_project(
ctx: Ctx,
mm: ModelManager,
params: ParamsIded,
) -> Result<Project> {
let ParamsIded { id } = params;
let project = ProjectBmc::get(&ctx, &mm, id).await?;
ProjectBmc::delete(&ctx, &mm, id).await?;
Ok(project)
}

View File

@@ -0,0 +1,70 @@
use crate::router::RpcRouter;
use crate::rpc_router;
use crate::Result;
use crate::{ParamsForCreate, ParamsForUpdate, ParamsIded, ParamsList};
use lib_core::ctx::Ctx;
use lib_core::model::task::{
Task, TaskBmc, TaskFilter, TaskForCreate, TaskForUpdate,
};
use lib_core::model::ModelManager;
pub fn rpc_router() -> RpcRouter {
rpc_router!(
// Same as RpcRouter::new().add...
create_task,
list_tasks,
update_task,
delete_task,
)
}
pub async fn create_task(
ctx: Ctx,
mm: ModelManager,
params: ParamsForCreate<TaskForCreate>,
) -> Result<Task> {
let ParamsForCreate { data } = params;
let id = TaskBmc::create(&ctx, &mm, data).await?;
let task = TaskBmc::get(&ctx, &mm, id).await?;
Ok(task)
}
pub async fn list_tasks(
ctx: Ctx,
mm: ModelManager,
params: ParamsList<TaskFilter>,
) -> Result<Vec<Task>> {
let tasks =
TaskBmc::list(&ctx, &mm, params.filters, params.list_options).await?;
Ok(tasks)
}
pub async fn update_task(
ctx: Ctx,
mm: ModelManager,
params: ParamsForUpdate<TaskForUpdate>,
) -> Result<Task> {
let ParamsForUpdate { id, data } = params;
TaskBmc::update(&ctx, &mm, id, data).await?;
let task = TaskBmc::get(&ctx, &mm, id).await?;
Ok(task)
}
pub async fn delete_task(
ctx: Ctx,
mm: ModelManager,
params: ParamsIded,
) -> Result<Task> {
let ParamsIded { id } = params;
let task = TaskBmc::get(&ctx, &mm, id).await?;
TaskBmc::delete(&ctx, &mm, id).await?;
Ok(task)
}

View File

@@ -0,0 +1,14 @@
[package]
name = "lib-utils"
version = "0.1.0"
edition = "2021"
[lib]
doctest = false
[lints]
workspace = true
[dependencies]
base64 = "0.21"
time = {version = "0.3", features = ["formatting", "parsing", "serde"]}

View File

@@ -0,0 +1,42 @@
use base64::engine::{general_purpose, Engine};
pub fn b64u_encode(content: impl AsRef<[u8]>) -> String {
general_purpose::URL_SAFE_NO_PAD.encode(content)
}
pub fn b64u_decode(b64u: &str) -> Result<Vec<u8>> {
general_purpose::URL_SAFE_NO_PAD
.decode(b64u)
.map_err(|_| Error::FailToB64uDecode)
}
pub fn b64u_decode_to_string(b64u: &str) -> Result<String> {
b64u_decode(b64u)
.ok()
.and_then(|r| String::from_utf8(r).ok())
.ok_or(Error::FailToB64uDecode)
}
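// Illustrative round-trip (a sketch, not part of the original file):
//
//     let enc = b64u_encode("hello");                    // "aGVsbG8"
//     assert_eq!(b64u_decode_to_string(&enc)?, "hello");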
// region: --- Error
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
FailToB64uDecode,
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate
// endregion: --- Error

View File

@@ -0,0 +1,40 @@
use crate::b64::b64u_decode;
use std::env;
use std::str::FromStr;
pub fn get_env(name: &'static str) -> Result<String> {
env::var(name).map_err(|_| Error::MissingEnv(name))
}
pub fn get_env_parse<T: FromStr>(name: &'static str) -> Result<T> {
let val = get_env(name)?;
val.parse::<T>().map_err(|_| Error::WrongFormat(name))
}
pub fn get_env_b64u_as_u8s(name: &'static str) -> Result<Vec<u8>> {
b64u_decode(&get_env(name)?).map_err(|_| Error::WrongFormat(name))
}
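// Illustrative usage (a sketch; `MY_SERVICE_PORT` is a hypothetical variable,
// not part of the original file):
//
//     let port: u16 = get_env_parse("MY_SERVICE_PORT")?;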
// region: --- Error
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
MissingEnv(&'static str),
WrongFormat(&'static str),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate
// endregion: --- Error

View File

@@ -0,0 +1,9 @@
//! The utils module is designed to export independent sub-modules to the application code.
//!
//! Note: Even if the util sub-modules consist of a single file, they contain their own errors
//! for improved compartmentalization.
//!
pub mod b64;
pub mod envs;
pub mod time;

View File

@@ -0,0 +1,45 @@
use time::{Duration, OffsetDateTime};
pub use time::format_description::well_known::Rfc3339;
pub fn now_utc() -> OffsetDateTime {
OffsetDateTime::now_utc()
}
pub fn format_time(time: OffsetDateTime) -> String {
time.format(&Rfc3339).unwrap() // TODO: need to check if safe.
}
pub fn now_utc_plus_sec_str(sec: f64) -> String {
let new_time = now_utc() + Duration::seconds_f64(sec);
format_time(new_time)
}
pub fn parse_utc(moment: &str) -> Result<OffsetDateTime> {
OffsetDateTime::parse(moment, &Rfc3339)
.map_err(|_| Error::FailToDateParse(moment.to_string()))
}
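// Illustrative round-trip (a sketch, not part of the original file):
//
//     let s = now_utc_plus_sec_str(30.0); // e.g., "2024-01-01T00:00:30.1234567Z"
//     let t = parse_utc(&s)?;             // back to an OffsetDateTime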
// region: --- Error
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug)]
pub enum Error {
FailToDateParse(String),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate
// endregion: --- Error

View File

@@ -0,0 +1,38 @@
[package]
name = "web-server"
version = "0.1.0"
edition = "2021"
[dependencies]
# -- App Libs
lib-utils = { path = "../../libs/lib-utils"}
lib-rpc = { path = "../../libs/lib-rpc"}
lib-auth = { path = "../../libs/lib-auth"}
lib-core = { path = "../../libs/lib-core"}
# -- Async
tokio = { version = "1", features = ["full"] }
async-trait = "0.1"
# -- Json
serde = { version = "1", features = ["derive"] }
serde_json = "1"
serde_with = "3"
# -- Web
axum = {version = "0.7", features = ["macros"]}
tower-http = { version = "0.5", features = ["fs"] }
tower-cookies = "0.10"
# -- Tracing
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
# -- Others
time = "0.3"
uuid = {version = "1", features = ["v4","fast-rng",]}
strum_macros = "0.25"
derive_more = {version = "1.0.0-beta", features = ["from"] }
[dev-dependencies]
anyhow = "1"
httpc-test = "0.1"
serial_test = "2"
time = "0.3"

View File

@@ -0,0 +1,137 @@
#![allow(unused)] // For example code.
use anyhow::Result;
use serde_json::json;
#[tokio::main]
async fn main() -> Result<()> {
let hc = httpc_test::new_client("http://localhost:8080")?;
// hc.do_get("/index.html").await?.print().await?;
let req_login = hc.do_post(
"/api/login",
json!({
"username": "demo1",
"pwd": "welcome"
}),
);
req_login.await?.print().await?;
let req_create_project = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "create_project",
"params": {
"data": {
"name": "project AAA"
}
}
}),
);
let result = req_create_project.await?;
result.print().await?;
let project_id = result.json_value::<i64>("/result/id")?;
let mut task_ids: Vec<i64> = Vec::new();
for i in 1..=5 {
let req_create_task = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "create_task",
"params": {
"data": {
"project_id": project_id,
"title": format!("task AAA {i}")
}
}
}),
);
let result = req_create_task.await?;
task_ids.push(result.json_value::<i64>("/result/id")?);
}
let req_update_task = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "update_task",
"params": {
"id": task_ids[0], // The first task created.
"data": {
"title": "task BB"
}
}
}),
);
req_update_task.await?.print().await?;
let req_delete_task = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "delete_task",
"params": {
"id": task_ids[1] // The second task created.
}
}),
);
req_delete_task.await?.print().await?;
let req_list_all_tasks = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "list_tasks",
"params": {
"filters": {
"project_id": project_id
},
"list_options": {
"order_bys": "!title"
}
}
}),
);
req_list_all_tasks.await?.print().await?;
let req_list_b_tasks = hc.do_post(
"/api/rpc",
json!({
"id": 1,
"method": "list_tasks",
"params": {
"filters": [
{
"project_id": project_id,
"title": {"$contains": "BB"},
},
// Shows how to use other $in
{
"project_id": { "$in": [project_id] },
"title": {"$in": ["task AAA 3", "task AAA 4"]}
},
// This won't match any projects, so, won't return anything.
{
"project_id": { "$in": [ 123, 124]},
"title": {"$in": ["task AAA 2", "task AAA 5"]}
}
]
}
}),
);
req_list_b_tasks.await?.print().await?;
let req_logoff = hc.do_post(
"/api/logoff",
json!({
"logoff": true
}),
);
// req_logoff.await?.print().await?;
Ok(())
}

View File

@@ -0,0 +1,25 @@
use lib_utils::envs::get_env;
use std::sync::OnceLock;
pub fn web_config() -> &'static WebConfig {
static INSTANCE: OnceLock<WebConfig> = OnceLock::new();
INSTANCE.get_or_init(|| {
WebConfig::load_from_env().unwrap_or_else(|ex| {
panic!("FATAL - WHILE LOADING CONF - Cause: {ex:?}")
})
})
}
#[allow(non_snake_case)]
pub struct WebConfig {
pub WEB_FOLDER: String,
}
impl WebConfig {
fn load_from_env() -> lib_utils::envs::Result<WebConfig> {
Ok(WebConfig {
WEB_FOLDER: get_env("SERVICE_WEB_FOLDER")?,
})
}
}
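// Illustrative usage (a sketch, not part of the original file): the config is
// loaded once on first access and then shared as a `&'static`.
//
//     let web_folder = &web_config().WEB_FOLDER;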

View File

@@ -0,0 +1,24 @@
use derive_more::From;
use lib_core::model;
pub type Result<T> = core::result::Result<T, Error>;
#[derive(Debug, From)]
pub enum Error {
// -- Modules
#[from]
Model(model::Error),
}
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate

View File

@@ -0,0 +1,87 @@
use crate::web::mw_stamp::ReqStamp;
use crate::web::routes_rpc::RpcInfo;
use crate::web::{self, ClientError};
use crate::Result;
use axum::http::{Method, Uri};
use lib_core::ctx::Ctx;
use lib_utils::time::{format_time, now_utc};
use serde::Serialize;
use serde_json::{json, Value};
use serde_with::skip_serializing_none;
use time::Duration;
use tracing::debug;
pub async fn log_request(
http_method: Method,
uri: Uri,
req_stamp: ReqStamp,
rpc_info: Option<&RpcInfo>,
ctx: Option<Ctx>,
web_error: Option<&web::Error>,
client_error: Option<ClientError>,
) -> Result<()> {
// -- Prep error
let error_type = web_error.map(|se| se.as_ref().to_string());
let error_data = serde_json::to_value(web_error)
.ok()
.and_then(|mut v| v.get_mut("data").map(|v| v.take()));
// -- Prep Req Information
let ReqStamp { uuid, time_in } = req_stamp;
let now = now_utc();
let duration: Duration = now - time_in;
// duration_ms in milliseconds with microsecond precision.
let duration_ms = (duration.as_seconds_f64() * 1_000_000.).floor() / 1_000.;
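// e.g., duration = 0.0123456s -> * 1_000_000 = 12345.6 -> floor = 12345.0
// -> / 1_000 = 12.345 ms (microsecond precision, as noted above).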
// Create the RequestLogLine
let log_line = RequestLogLine {
uuid: uuid.to_string(),
timestamp: format_time(now), // LogLine timestamp ("time_out")
time_in: format_time(time_in),
duration_ms,
http_path: uri.to_string(),
http_method: http_method.to_string(),
rpc_id: rpc_info.and_then(|rpc| rpc.id.as_ref().map(|id| id.to_string())),
rpc_method: rpc_info.map(|rpc| rpc.method.to_string()),
user_id: ctx.map(|c| c.user_id()),
client_error_type: client_error.map(|e| e.as_ref().to_string()),
error_type,
error_data,
};
debug!("REQUEST LOG LINE:\n{}", json!(log_line));
// TODO - Send to cloud-watch.
Ok(())
}
#[skip_serializing_none]
#[derive(Serialize)]
struct RequestLogLine {
uuid: String, // uuid string formatted
timestamp: String, // (Rfc3339)
time_in: String, // (Rfc3339)
duration_ms: f64,
// -- User and context attributes.
user_id: Option<i64>,
// -- http request attributes.
http_path: String,
http_method: String,
// -- rpc info.
rpc_id: Option<String>,
rpc_method: Option<String>,
// -- Errors attributes.
client_error_type: Option<String>,
error_type: Option<String>,
error_data: Option<Value>,
}

View File

@@ -0,0 +1,64 @@
// region: --- Modules
mod config;
mod error;
mod log;
mod web;
pub use self::error::{Error, Result};
use config::web_config;
use crate::web::mw_auth::{mw_ctx_require, mw_ctx_resolve};
use crate::web::mw_res_map::mw_response_map;
use crate::web::mw_stamp::mw_req_stamp;
use crate::web::routes_rpc::RpcState;
use crate::web::{routes_login, routes_static};
use axum::{middleware, Router};
use lib_core::_dev_utils;
use lib_core::model::ModelManager;
use tokio::net::TcpListener;
use tower_cookies::CookieManagerLayer;
use tracing::info;
use tracing_subscriber::EnvFilter;
// endregion: --- Modules
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt()
.without_time() // For early local development.
.with_target(false)
.with_env_filter(EnvFilter::from_default_env())
.init();
// -- FOR DEV ONLY
_dev_utils::init_dev().await;
// Initialize ModelManager.
let mm = ModelManager::new().await?;
// -- Define Routes
let rpc_state = RpcState { mm: mm.clone() };
let routes_rpc = web::routes_rpc::routes(rpc_state)
.route_layer(middleware::from_fn(mw_ctx_require));
let routes_all = Router::new()
.merge(routes_login::routes(mm.clone()))
.nest("/api", routes_rpc)
.layer(middleware::map_response(mw_response_map))
.layer(middleware::from_fn_with_state(mm.clone(), mw_ctx_resolve))
.layer(middleware::from_fn(mw_req_stamp))
.layer(CookieManagerLayer::new())
.fallback_service(routes_static::serve_dir());
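// Note (assumption about axum's layering semantics): layers added later wrap
// the earlier ones, so at request time the order is CookieManagerLayer ->
// mw_req_stamp -> mw_ctx_resolve -> handlers, with mw_response_map applied to
// the response on the way out. Cookies are thus available to the ctx resolver.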
// region: --- Start Server
// Note: For this block, ok to unwrap.
let listener = TcpListener::bind("127.0.0.1:8080").await.unwrap();
info!("{:<12} - {:?}\n", "LISTENING", listener.local_addr());
axum::serve(listener, routes_all.into_make_service())
.await
.unwrap();
// endregion: --- Start Server
Ok(())
}

View File

@@ -0,0 +1,121 @@
use crate::web;
use axum::http::StatusCode;
use axum::response::{IntoResponse, Response};
use derive_more::From;
use lib_auth::{pwd, token};
use lib_core::model;
use serde::Serialize;
use serde_with::{serde_as, DisplayFromStr};
use std::sync::Arc;
use tracing::debug;
pub type Result<T> = core::result::Result<T, Error>;
#[serde_as]
#[derive(Debug, Serialize, From, strum_macros::AsRefStr)]
#[serde(tag = "type", content = "data")]
pub enum Error {
// -- Login
LoginFailUsernameNotFound,
LoginFailUserHasNoPwd {
user_id: i64,
},
LoginFailPwdNotMatching {
user_id: i64,
},
// -- CtxExtError
#[from]
CtxExt(web::mw_auth::CtxExtError),
// -- ReqStamp
ReqStampNotInResponseExt,
// -- Modules
#[from]
Model(model::Error),
#[from]
Pwd(pwd::Error),
#[from]
Token(token::Error),
#[from]
Rpc(lib_rpc::Error),
// -- External Modules
#[from]
SerdeJson(#[serde_as(as = "DisplayFromStr")] serde_json::Error),
}
// region: --- Axum IntoResponse
impl IntoResponse for Error {
fn into_response(self) -> Response {
debug!("{:<12} - model::Error {self:?}", "INTO_RES");
// Create a placeholder Axum response.
let mut response = StatusCode::INTERNAL_SERVER_ERROR.into_response();
// Insert the Error into the response.
response.extensions_mut().insert(Arc::new(self));
response
}
}
// endregion: --- Axum IntoResponse
// region: --- Error Boilerplate
impl core::fmt::Display for Error {
fn fmt(
&self,
fmt: &mut core::fmt::Formatter,
) -> core::result::Result<(), core::fmt::Error> {
write!(fmt, "{self:?}")
}
}
impl std::error::Error for Error {}
// endregion: --- Error Boilerplate
// region: --- Client Error
/// From the root error to the http status code and ClientError
impl Error {
pub fn client_status_and_error(&self) -> (StatusCode, ClientError) {
use web::Error::*;
match self {
// -- Login
LoginFailUsernameNotFound
| LoginFailUserHasNoPwd { .. }
| LoginFailPwdNotMatching { .. } => {
(StatusCode::FORBIDDEN, ClientError::LOGIN_FAIL)
}
// -- Auth
CtxExt(_) => (StatusCode::FORBIDDEN, ClientError::NO_AUTH),
// -- Model
Model(model::Error::EntityNotFound { entity, id }) => (
StatusCode::BAD_REQUEST,
ClientError::ENTITY_NOT_FOUND { entity, id: *id },
),
// -- Fallback.
_ => (
StatusCode::INTERNAL_SERVER_ERROR,
ClientError::SERVICE_ERROR,
),
}
}
}
#[derive(Debug, Serialize, strum_macros::AsRefStr)]
#[serde(tag = "message", content = "detail")]
#[allow(non_camel_case_types)]
pub enum ClientError {
LOGIN_FAIL,
NO_AUTH,
ENTITY_NOT_FOUND { entity: &'static str, id: i64 },
SERVICE_ERROR,
}
// endregion: --- Client Error

View File

@@ -0,0 +1,40 @@
// region: --- Modules
mod error;
pub mod mw_auth;
pub mod mw_res_map;
pub mod mw_stamp;
pub mod routes_login;
pub mod routes_rpc;
pub mod routes_static;
pub use self::error::ClientError;
pub use self::error::{Error, Result};
use lib_auth::token::generate_web_token;
use tower_cookies::{Cookie, Cookies};
use uuid::Uuid;
// endregion: --- Modules
pub const AUTH_TOKEN: &str = "auth-token";
fn set_token_cookie(cookies: &Cookies, user: &str, salt: Uuid) -> Result<()> {
let token = generate_web_token(user, salt)?;
let mut cookie = Cookie::new(AUTH_TOKEN, token.to_string());
cookie.set_http_only(true);
cookie.set_path("/");
cookies.add(cookie);
Ok(())
}
fn remove_token_cookie(cookies: &Cookies) -> Result<()> {
let mut cookie = Cookie::from(AUTH_TOKEN);
cookie.set_path("/");
cookies.remove(cookie);
Ok(())
}

View File

@@ -0,0 +1,121 @@
use crate::web::{set_token_cookie, AUTH_TOKEN};
use crate::web::{Error, Result};
use async_trait::async_trait;
use axum::body::Body;
use axum::extract::{FromRequestParts, State};
use axum::http::request::Parts;
use axum::http::Request;
use axum::middleware::Next;
use axum::response::Response;
use lib_auth::token::{validate_web_token, Token};
use lib_core::ctx::Ctx;
use lib_core::model::user::{UserBmc, UserForAuth};
use lib_core::model::ModelManager;
use serde::Serialize;
use tower_cookies::{Cookie, Cookies};
use tracing::debug;
pub async fn mw_ctx_require(
ctx: Result<CtxW>,
req: Request<Body>,
next: Next,
) -> Result<Response> {
debug!("{:<12} - mw_ctx_require - {ctx:?}", "MIDDLEWARE");
ctx?;
Ok(next.run(req).await)
}
pub async fn mw_ctx_resolve(
mm: State<ModelManager>,
cookies: Cookies,
mut req: Request<Body>,
next: Next,
) -> Result<Response> {
debug!("{:<12} - mw_ctx_resolve", "MIDDLEWARE");
let ctx_ext_result = _ctx_resolve(mm, &cookies).await;
if ctx_ext_result.is_err()
&& !matches!(ctx_ext_result, Err(CtxExtError::TokenNotInCookie))
{
cookies.remove(Cookie::from(AUTH_TOKEN))
}
// Store the ctx_ext_result in the request extension
// (for Ctx extractor).
req.extensions_mut().insert(ctx_ext_result);
Ok(next.run(req).await)
}
async fn _ctx_resolve(mm: State<ModelManager>, cookies: &Cookies) -> CtxExtResult {
// -- Get Token String
let token = cookies
.get(AUTH_TOKEN)
.map(|c| c.value().to_string())
.ok_or(CtxExtError::TokenNotInCookie)?;
// -- Parse Token
let token: Token = token.parse().map_err(|_| CtxExtError::TokenWrongFormat)?;
// -- Get UserForAuth
let user: UserForAuth =
UserBmc::first_by_username(&Ctx::root_ctx(), &mm, &token.ident)
.await
.map_err(|ex| CtxExtError::ModelAccessError(ex.to_string()))?
.ok_or(CtxExtError::UserNotFound)?;
// -- Validate Token
validate_web_token(&token, user.token_salt)
.map_err(|_| CtxExtError::FailValidate)?;
// -- Update Token
set_token_cookie(cookies, &user.username, user.token_salt)
.map_err(|_| CtxExtError::CannotSetTokenCookie)?;
// -- Create CtxExtResult
Ctx::new(user.id)
.map(CtxW)
.map_err(|ex| CtxExtError::CtxCreateFail(ex.to_string()))
}
// region: --- Ctx Extractor
#[derive(Debug, Clone)]
pub struct CtxW(pub Ctx);
#[async_trait]
impl<S: Send + Sync> FromRequestParts<S> for CtxW {
type Rejection = Error;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self> {
debug!("{:<12} - Ctx", "EXTRACTOR");
parts
.extensions
.get::<CtxExtResult>()
.ok_or(Error::CtxExt(CtxExtError::CtxNotInRequestExt))?
.clone()
.map_err(Error::CtxExt)
}
}
// endregion: --- Ctx Extractor
// region: --- Ctx Extractor Result/Error
type CtxExtResult = core::result::Result<CtxW, CtxExtError>;
#[derive(Clone, Serialize, Debug)]
pub enum CtxExtError {
TokenNotInCookie,
TokenWrongFormat,
UserNotFound,
ModelAccessError(String),
FailValidate,
CannotSetTokenCookie,
CtxNotInRequestExt,
CtxCreateFail(String),
}
// endregion: --- Ctx Extractor Result/Error

View File

@@ -0,0 +1,74 @@
use crate::log::log_request;
use crate::web::mw_auth::CtxW;
use crate::web::mw_stamp::ReqStamp;
use crate::web::routes_rpc::RpcInfo;
use crate::web::{self};
use axum::http::{Method, Uri};
use axum::response::{IntoResponse, Response};
use axum::Json;
use serde_json::{json, to_value};
use std::sync::Arc;
use tracing::debug;
pub async fn mw_response_map(
ctx: Option<CtxW>,
uri: Uri,
req_method: Method,
req_stamp: ReqStamp,
res: Response,
) -> Response {
let ctx = ctx.map(|ctx| ctx.0);
debug!("{:<12} - mw_reponse_map", "RES_MAPPER");
let uuid = Uuid::new_v4();
let rpc_info = res.extensions().get::<Arc<RpcInfo>>().map(Arc::as_ref);
// -- Get the eventual response error.
let web_error = res.extensions().get::<Arc<web::Error>>().map(Arc::as_ref);
let client_status_error = web_error.map(|se| se.client_status_and_error());
// -- If client error, build the new response.
let error_response =
client_status_error
.as_ref()
.map(|(status_code, client_error)| {
let client_error = to_value(client_error).ok();
let message = client_error.as_ref().and_then(|v| v.get("message"));
let detail = client_error.as_ref().and_then(|v| v.get("detail"));
let client_error_body = json!({
"id": rpc_info.as_ref().map(|rpc| rpc.id.clone()),
"error": {
"message": message, // Variant name
"data": {
"req_uuid": uuid.to_string(),
"detail": detail
},
}
});
debug!("CLIENT ERROR BODY:\n{client_error_body}");
// Build the new response from the client_error_body
(*status_code, Json(client_error_body)).into_response()
});
// -- Build and log the server log line.
let client_error = client_status_error.unzip().1;
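// (`Option<(StatusCode, ClientError)>::unzip()` splits into a pair of
// `Option`s; only the `ClientError` half is needed for the log line.)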
// TODO: Need to handle the case where log_request fails (but it should not fail the request).
let _ = log_request(
req_method,
uri,
req_stamp,
rpc_info,
ctx,
web_error,
client_error,
)
.await;
debug!("\n");
error_response.unwrap_or(res)
}

View File

@@ -0,0 +1,46 @@
use crate::web::{Error, Result};
use async_trait::async_trait;
use axum::body::Body;
use axum::extract::FromRequestParts;
use axum::http::request::Parts;
use axum::http::Request;
use axum::middleware::Next;
use axum::response::Response;
use lib_utils::time::now_utc;
use time::OffsetDateTime;
use tracing::debug;
use uuid::Uuid;
#[derive(Debug, Clone)]
pub struct ReqStamp {
pub uuid: Uuid,
pub time_in: OffsetDateTime,
}
pub async fn mw_req_stamp(mut req: Request<Body>, next: Next) -> Result<Response> {
debug!("{:<12} - mw_req_stamp_resolver", "MIDDLEWARE");
let time_in = now_utc();
let uuid = Uuid::new_v4();
req.extensions_mut().insert(ReqStamp { uuid, time_in });
Ok(next.run(req).await)
}
// region: --- ReqStamp Extractor
#[async_trait]
impl<S: Send + Sync> FromRequestParts<S> for ReqStamp {
type Rejection = Error;
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self> {
debug!("{:<12} - ReqStamp", "EXTRACTOR");
parts
.extensions
.get::<ReqStamp>()
.cloned()
.ok_or(Error::ReqStampNotInResponseExt)
}
}
// endregion: --- ReqStamp Extractor
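Note: a minimal wiring sketch (an assumption, not in this commit) of how this middleware would typically be layered on a router, with the stamp read back through the extractor in a handler. It assumes the web::Error rejection implements IntoResponse:

// Hypothetical wiring; the real router setup lives elsewhere in the workspace.
use axum::{middleware, routing::get, Router};

fn demo_router() -> Router {
	Router::new()
		.route(
			"/demo",
			get(|stamp: ReqStamp| async move {
				format!("req {} at {}", stamp.uuid, stamp.time_in)
			}),
		)
		// from_fn runs mw_req_stamp before the handlers above.
		.layer(middleware::from_fn(mw_req_stamp))
}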

View File

@@ -0,0 +1,107 @@
use crate::web::{self, remove_token_cookie, Error, Result};
use axum::extract::State;
use axum::routing::post;
use axum::{Json, Router};
use lib_auth::pwd::{self, ContentToHash, SchemeStatus};
use lib_core::ctx::Ctx;
use lib_core::model::user::{UserBmc, UserForLogin};
use lib_core::model::ModelManager;
use serde::Deserialize;
use serde_json::{json, Value};
use tower_cookies::Cookies;
use tracing::debug;
pub fn routes(mm: ModelManager) -> Router {
Router::new()
.route("/api/login", post(api_login_handler))
.route("/api/logoff", post(api_logoff_handler))
.with_state(mm)
}
// region: --- Login
async fn api_login_handler(
State(mm): State<ModelManager>,
cookies: Cookies,
Json(payload): Json<LoginPayload>,
) -> Result<Json<Value>> {
debug!("{:<12} - api_login_handler", "HANDLER");
let LoginPayload {
username,
pwd: pwd_clear,
} = payload;
let root_ctx = Ctx::root_ctx();
// -- Get the user.
let user: UserForLogin = UserBmc::first_by_username(&root_ctx, &mm, &username)
.await?
.ok_or(Error::LoginFailUsernameNotFound)?;
let user_id = user.id;
// -- Validate the password.
let Some(pwd) = user.pwd else {
return Err(Error::LoginFailUserHasNoPwd { user_id });
};
let scheme_status = pwd::validate_pwd(
&ContentToHash {
salt: user.pwd_salt,
content: pwd_clear.clone(),
},
&pwd,
)
.map_err(|_| Error::LoginFailPwdNotMatching { user_id })?;
// -- Update password scheme if needed
if let SchemeStatus::Outdated = scheme_status {
debug!("pwd encrypt scheme outdated, upgrading.");
UserBmc::update_pwd(&root_ctx, &mm, user.id, &pwd_clear).await?;
}
// -- Set web token.
web::set_token_cookie(&cookies, &user.username, user.token_salt)?;
// Create the success body.
let body = Json(json!({
"result": {
"success": true
}
}));
Ok(body)
}
#[derive(Debug, Deserialize)]
struct LoginPayload {
username: String,
pwd: String,
}
// endregion: --- Login
// region: --- Logoff
async fn api_logoff_handler(
cookies: Cookies,
Json(payload): Json<LogoffPayload>,
) -> Result<Json<Value>> {
debug!("{:<12} - api_logoff_handler", "HANDLER");
let should_logoff = payload.logoff;
if should_logoff {
remove_token_cookie(&cookies)?;
}
// Create the success body.
let body = Json(json!({
"result": {
"logged_off": should_logoff
}
}));
Ok(body)
}
#[derive(Debug, Deserialize)]
struct LogoffPayload {
logoff: bool,
}
// endregion: --- Logoff
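Note: for reference, api_login_handler expects a JSON body like {"username": "demo1", "pwd": "<clear password>"} posted to /api/login, and api_logoff_handler expects {"logoff": true} on /api/logoff; both return the "result" bodies built above ("demo1" is the user seeded in the dev SQL below).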

View File

@@ -0,0 +1,80 @@
use crate::web::mw_auth::CtxW;
use axum::extract::State;
use axum::response::{IntoResponse, Response};
use axum::routing::post;
use axum::{Json, Router};
use lib_core::model::ModelManager;
use lib_rpc::router::RpcRouter;
use lib_rpc::{project_rpc, task_rpc, RpcRequest, RpcResources};
use serde_json::{json, Value};
use std::sync::Arc;
/// The RpcState is the Axum State that will
/// be used for the Axum RPC router handler.
///
/// Note: Not to be confused with RpcResources, which belong to the lib-rpc
/// layer's RpcRouter system. RpcResources typically contain some elements
/// of the RpcState.
#[derive(Clone)]
pub struct RpcState {
pub mm: ModelManager,
}
#[derive(Debug)]
pub struct RpcInfo {
pub id: Option<Value>,
pub method: String,
}
// Axum router for '/api/rpc'
pub fn routes(rpc_state: RpcState) -> Router {
// Build the combined RpcRouter.
let rpc_router = RpcRouter::new()
.extend(task_rpc::rpc_router())
.extend(project_rpc::rpc_router());
// Build the Axum Router for '/rpc'
Router::new()
.route("/rpc", post(rpc_axum_handler))
.with_state((rpc_state, Arc::new(rpc_router)))
}
async fn rpc_axum_handler(
State((rpc_state, rpc_router)): State<(RpcState, Arc<RpcRouter>)>,
ctx: CtxW,
Json(rpc_req): Json<RpcRequest>,
) -> Response {
let ctx = ctx.0;
// -- Create the RPC Info
// (will be set to the response.extensions)
let rpc_info = RpcInfo {
id: rpc_req.id.clone(),
method: rpc_req.method.clone(),
};
let rpc_method = &rpc_info.method;
let rpc_params = rpc_req.params;
let rpc_resources = RpcResources {
ctx: Some(ctx),
mm: rpc_state.mm,
};
// -- Exec Rpc Route
let res = rpc_router.call(rpc_method, rpc_resources, rpc_params).await;
// -- Build Rpc Success Response
let res = res.map(|v| {
let body_response = json!({
"id": rpc_info.id,
"result": v
});
Json(body_response)
});
// -- Create and Update Axum Response
let res: crate::web::Result<_> = res.map_err(crate::web::Error::from);
let mut res = res.into_response();
res.extensions_mut().insert(Arc::new(rpc_info));
res
}
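Note: for illustration, rpc_axum_handler accepts a JSON-RPC-style body. The method name and params below are assumptions about what task_rpc exposes, not part of this commit:

{
  "id": 1,
  "method": "create_task",
  "params": { "data": { "project_id": 1000, "title": "demo task" } }
}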

View File

@@ -0,0 +1,18 @@
use crate::web_config;
use axum::handler::HandlerWithoutStateExt;
use axum::http::StatusCode;
use axum::routing::{any_service, MethodRouter};
use tower_http::services::ServeDir;
// Note: Here we can just return a MethodRouter rather than a full Router
// since ServeDir is a service.
pub fn serve_dir() -> MethodRouter {
async fn handle_404() -> (StatusCode, &'static str) {
(StatusCode::NOT_FOUND, "Resource not found.")
}
any_service(
ServeDir::new(&web_config().WEB_FOLDER)
.not_found_service(handle_404.into_service()),
)
}
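Note: a sketch (assumption) of how serve_dir() is typically mounted, as a fallback service so the API routes take precedence:

// Hypothetical wiring in the server's main router.
fn all_routes() -> axum::Router {
	axum::Router::new()
		// API routers would be merged here first.
		.fallback_service(serve_dir())
}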

View File

@@ -0,0 +1,11 @@
[package]
name = "gen-key"
version = "0.1.0"
edition = "2021"
[dependencies]
# -- App Crates
lib-utils = { path = "../../libs/lib-utils"}
# -- Others
rand = "0.8"
anyhow = "1" # Ok for tools/

View File

@@ -0,0 +1,14 @@
use anyhow::Result;
use lib_utils::b64::b64u_encode;
use rand::RngCore;
fn main() -> Result<()> {
let mut key = [0u8; 64]; // 512 bits = 64 bytes
rand::thread_rng().fill_bytes(&mut key);
println!("\nGenerated key from rand::thread_rng():\n{key:?}");
let b64u = b64u_encode(key);
println!("\nKey b64u encoded:\n{b64u}");
Ok(())
}
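Note: run with "cargo run -p gen-key". The tool prints the raw 64-byte key and its base64url encoding; the encoded form is typically the one pasted into key configuration.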

9
rustfmt.toml Normal file
View File

@@ -0,0 +1,9 @@
# rustfmt doc - https://rust-lang.github.io/rustfmt/
hard_tabs = true
edition = "2021"
# Narrower max width (e.g., for screen recording readability).
max_width = 85
# chain_width = 40
# array_width = 40

View File

@@ -0,0 +1,9 @@
-- DEV ONLY - Brute Force DROP DB (for local dev and unit test)
SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE
usename = 'app_user' OR datname = 'app_db';
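-- Note: 'usename' (not 'username') is the actual pg_stat_activity column name.
-- Terminating the backends first is required because Postgres refuses to drop
-- a database that still has active connections.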
DROP DATABASE IF EXISTS app_db;
DROP USER IF EXISTS app_user;
-- DEV ONLY - Dev only password (for local dev and unit test).
CREATE USER app_user PASSWORD 'dev_only_pwd';
CREATE DATABASE app_db owner app_user ENCODING = 'UTF-8';

View File

@@ -0,0 +1,58 @@
---- Base app schema
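-- Column convention (assumed from usage): cid/ctime = creator id and creation
-- time, mid/mtime = last modifier id and modification time.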
-- User
CREATE TABLE "user" (
id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1000) PRIMARY KEY,
username varchar(128) NOT NULL UNIQUE,
-- Auth
pwd varchar(256),
pwd_salt uuid NOT NULL DEFAULT gen_random_uuid(),
token_salt uuid NOT NULL DEFAULT gen_random_uuid(),
-- Timestamps
cid bigint NOT NULL,
ctime timestamp with time zone NOT NULL,
mid bigint NOT NULL,
mtime timestamp with time zone NOT NULL
);
-- Project
CREATE TABLE project (
-- PK
id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1000) PRIMARY KEY,
-- Properties
owner_id BIGINT NOT NULL,
name varchar(256) NOT NULL,
-- Timestamps
cid bigint NOT NULL,
ctime timestamp with time zone NOT NULL,
mid bigint NOT NULL,
mtime timestamp with time zone NOT NULL
);
-- Task
CREATE TABLE task (
-- PK
id BIGINT GENERATED BY DEFAULT AS IDENTITY (START WITH 1000) PRIMARY KEY,
-- FK
project_id BIGINT NOT NULL,
-- Properties
title varchar(256) NOT NULL,
done bool NOT NULL DEFAULT false,
-- Timestamps
cid bigint NOT NULL,
ctime timestamp with time zone NOT NULL,
mid bigint NOT NULL,
mtime timestamp with time zone NOT NULL
);
ALTER TABLE task ADD CONSTRAINT fk_project
FOREIGN KEY (project_id) REFERENCES project(id)
ON DELETE CASCADE;

View File

@@ -0,0 +1,9 @@
-- root user (at id = 0)
INSERT INTO "user"
(id, username, cid, ctime, mid, mtime) VALUES
(0, 'root', 0, now(), 0, now());
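-- Note: identity columns start at 1000 (see the schema's START WITH 1000),
-- so the explicit id 0 keeps root outside the application id range.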
-- User demo1
INSERT INTO "user"
(username, cid, ctime, mid, mtime) VALUES
('demo1', 0, now(), 0, now());

13
web-folder/index.html Normal file
View File

@@ -0,0 +1,13 @@
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>AWESOME-APP Web</title>
</head>
<body>
Hello <strong>World!</strong>
</body>
</html>