Compare commits
17 Commits
de38e20d3a
...
dev/v1-s
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f915e72cf5 | ||
|
|
33cb0aaa33 | ||
|
|
891982ede0 | ||
|
|
bf2e1f2af6 | ||
|
|
6a5a45bf20 | ||
|
|
9e58839491 | ||
|
|
7e5e9cb32f | ||
|
|
a34dbc60c4 | ||
|
|
daf6c7c16a | ||
|
|
cade85d576 | ||
|
|
6834c66974 | ||
|
|
366862831c | ||
|
|
8273e610cb | ||
|
|
e6a32eab51 | ||
|
|
c3493cbe6f | ||
|
|
93b7e46655 | ||
|
|
27c94f4276 |
@@ -1,8 +0,0 @@
|
||||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
|
||||
[Makefile]
|
||||
indent_style = tab
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -3,3 +3,6 @@
|
||||
.vscode
|
||||
.DS_Store
|
||||
.env
|
||||
conf.toml
|
||||
config.toml
|
||||
.fleet\
|
||||
|
||||
2846
Cargo.lock
generated
2846
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
40
Cargo.toml
40
Cargo.toml
@@ -4,25 +4,31 @@ version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[workspace]
|
||||
members = [".", "migration"]
|
||||
|
||||
[dependencies]
|
||||
async-trait = "0.1.81"
|
||||
axum = {version = "0.7.5", features = ["macros"]}
|
||||
axum-extra = { version = "0.9.3", features = ["typed-header"] }
|
||||
chrono = {version = "0.4", features = ["serde"]}
|
||||
deadpool-diesel = {version ="0.6.1", features = ["postgres"]}
|
||||
diesel = { version = "2", features = ["postgres", "chrono"] }
|
||||
dotenvy = "0.15"
|
||||
jsonwebtoken = "9.3.0"
|
||||
serde = { version = "1.0.202", features = ["derive"] }
|
||||
serde_json = "1"
|
||||
tokio = { version = "1.37.0", features = ["full"] }
|
||||
tower = "0.4.13"
|
||||
tower-http = {version= "0.5.2", features=["trace", "cors"] }
|
||||
axum = { version = "0.8" }
|
||||
axum-extra = {version = "0.10", features = ["typed-header", "typed-routing"] }
|
||||
axum-macros = "0.5"
|
||||
sea-orm = { version = "1.1.16", features = [
|
||||
"sqlx-postgres",
|
||||
"runtime-tokio-rustls",
|
||||
"macros",
|
||||
"chrono",
|
||||
] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
once_cell = "1.19.0"
|
||||
axum-macros = "0.4.1"
|
||||
dotenvy = "0.15.7"
|
||||
toml = "0.9.7"
|
||||
clap = { version = "4.0", features = ["derive"] }
|
||||
pbkdf2 = { version = "0.12", features = ["simple"] }
|
||||
rand_core ={version = "0.6", features = ["std"]}
|
||||
regex = {version = "1.10"}
|
||||
rand_core = { version = "0.9.3", features = ["std"] }
|
||||
jsonwebtoken = "9"
|
||||
once_cell = "1.21.3"
|
||||
tower-http = {version= "0.6", features = ["trace", "cors"] }
|
||||
tower = "0.5.2"
|
||||
rand = "0.9.2"
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
# For documentation on how to configure this file,
|
||||
# see https://diesel.rs/guides/configuring-diesel-cli
|
||||
|
||||
[print_schema]
|
||||
file = "src/schema.rs"
|
||||
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
|
||||
|
||||
[migrations_directory]
|
||||
dir = "/data/codes/helios-server-rs/migrations"
|
||||
22
migration/Cargo.toml
Normal file
22
migration/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
||||
[package]
|
||||
name = "migration"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
name = "migration"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
async-std = { version = "1", features = ["attributes", "tokio1"] }
|
||||
|
||||
[dependencies.sea-orm-migration]
|
||||
version = "1.1.12"
|
||||
features = [
|
||||
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
|
||||
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
|
||||
# e.g.
|
||||
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
|
||||
"sqlx-postgres", # `DATABASE_DRIVER` feature
|
||||
]
|
||||
41
migration/README.md
Normal file
41
migration/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Running Migrator CLI
|
||||
|
||||
- Generate a new migration file
|
||||
```sh
|
||||
cargo run -- generate MIGRATION_NAME
|
||||
```
|
||||
- Apply all pending migrations
|
||||
```sh
|
||||
cargo run
|
||||
```
|
||||
```sh
|
||||
cargo run -- up
|
||||
```
|
||||
- Apply first 10 pending migrations
|
||||
```sh
|
||||
cargo run -- up -n 10
|
||||
```
|
||||
- Rollback last applied migrations
|
||||
```sh
|
||||
cargo run -- down
|
||||
```
|
||||
- Rollback last 10 applied migrations
|
||||
```sh
|
||||
cargo run -- down -n 10
|
||||
```
|
||||
- Drop all tables from the database, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- fresh
|
||||
```
|
||||
- Rollback all applied migrations, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- refresh
|
||||
```
|
||||
- Rollback all applied migrations
|
||||
```sh
|
||||
cargo run -- reset
|
||||
```
|
||||
- Check the status of all migrations
|
||||
```sh
|
||||
cargo run -- status
|
||||
```
|
||||
24
migration/src/lib.rs
Normal file
24
migration/src/lib.rs
Normal file
@@ -0,0 +1,24 @@
|
||||
pub use sea_orm_migration::prelude::*;
|
||||
|
||||
mod m20250525_000001_create_ledger_table_category;
|
||||
mod m20250525_000002_create_ledger_table_book;
|
||||
mod m20250525_000003_create_ledger_table_tag;
|
||||
mod m20250525_000004_create_ledger_table_account;
|
||||
mod m20250525_000005_create_ledger_table_transaction;
|
||||
mod m20250921_000001_create_ledger_table_transaction_tag_rel;
|
||||
|
||||
pub struct Migrator;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigratorTrait for Migrator {
|
||||
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
|
||||
vec![
|
||||
Box::new(m20250525_000001_create_ledger_table_category::Migration),
|
||||
Box::new(m20250525_000002_create_ledger_table_book::Migration),
|
||||
Box::new(m20250525_000003_create_ledger_table_tag::Migration),
|
||||
Box::new(m20250525_000004_create_ledger_table_account::Migration),
|
||||
Box::new(m20250525_000005_create_ledger_table_transaction::Migration),
|
||||
Box::new(m20250921_000001_create_ledger_table_transaction_tag_rel::Migration),
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000001_create_ledger_table_category" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Category::Table)
|
||||
.col(
|
||||
ColumnDef::new(Category::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Category::Name).string().not_null())
|
||||
.col(ColumnDef::new(Category::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Category::ParentId).big_integer().default(0i64).not_null())
|
||||
.col(ColumnDef::new(Category::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Category::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Category::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Category::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Category {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
ParentId,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
63
migration/src/m20250525_000002_create_ledger_table_book.rs
Normal file
63
migration/src/m20250525_000002_create_ledger_table_book.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000002_create_ledger_table_book" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
// Define how to apply this migration
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Book::Table)
|
||||
.col(
|
||||
ColumnDef::new(Book::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Book::Name).string().not_null())
|
||||
.col(ColumnDef::new(Book::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Book::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Book::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Book::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Book::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Book {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
62
migration/src/m20250525_000003_create_ledger_table_tag.rs
Normal file
62
migration/src/m20250525_000003_create_ledger_table_tag.rs
Normal file
@@ -0,0 +1,62 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000001_create_ledger_table_tag" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Tag::Table)
|
||||
.col(
|
||||
ColumnDef::new(Tag::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Tag::Name).string().not_null())
|
||||
.col(ColumnDef::new(Tag::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Tag::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Tag::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Tag::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Tag::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Tag {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000004_create_ledger_table_account" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Account::Table)
|
||||
.col(
|
||||
ColumnDef::new(Account::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Account::Name).string().not_null())
|
||||
.col(ColumnDef::new(Account::Type).integer().not_null())
|
||||
.col(ColumnDef::new(Account::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Account::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Account::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Account::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Account::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Account {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
Type,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000005_create_ledger_table_transaction" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Transaction::Table)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Transaction::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Transaction::Type).integer().not_null())
|
||||
.col(ColumnDef::new(Transaction::BookId).big_integer().not_null())
|
||||
.col(
|
||||
ColumnDef::new(Transaction::CategoryId)
|
||||
.big_integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(ColumnDef::new(Transaction::Description).string().not_null())
|
||||
.col(
|
||||
ColumnDef::new(Transaction::TransactionTime)
|
||||
.timestamp_with_time_zone()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::IsDeleted)
|
||||
.boolean()
|
||||
.default(false)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Transaction::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Transaction {
|
||||
Table,
|
||||
Id,
|
||||
Uid,
|
||||
Type,
|
||||
BookId,
|
||||
CategoryId,
|
||||
Description,
|
||||
TransactionTime,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
use crate::sea_query;
|
||||
use sea_orm_migration::{MigrationName, MigrationTrait, SchemaManager};
|
||||
use crate::{async_trait, ColumnDef, DbErr, Expr, Iden, Table};
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for crate::m20250921_000001_create_ledger_table_transaction_tag_rel::Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250921_000001_create_ledger_table_transaction_tag_rel" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for crate::m20250921_000001_create_ledger_table_transaction_tag_rel::Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Table)
|
||||
.col(
|
||||
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::TransactionId).big_integer().not_null())
|
||||
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::TagId).big_integer().not_null())
|
||||
.col(
|
||||
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::IsDeleted)
|
||||
.boolean()
|
||||
.default(false)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum TransactionTagRel {
|
||||
Table,
|
||||
Id,
|
||||
Uid,
|
||||
TransactionId,
|
||||
TagId,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
6
migration/src/main.rs
Normal file
6
migration/src/main.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[async_std::main]
|
||||
async fn main() {
|
||||
cli::run_cli(migration::Migrator).await;
|
||||
}
|
||||
@@ -1,6 +0,0 @@
|
||||
-- This file was automatically created by Diesel to setup helper functions
|
||||
-- and other internal bookkeeping. This file is safe to edit, any future
|
||||
-- changes will be added to existing projects as new migrations.
|
||||
|
||||
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
|
||||
DROP FUNCTION IF EXISTS diesel_set_updated_at();
|
||||
@@ -1,36 +0,0 @@
|
||||
-- This file was automatically created by Diesel to setup helper functions
|
||||
-- and other internal bookkeeping. This file is safe to edit, any future
|
||||
-- changes will be added to existing projects as new migrations.
|
||||
|
||||
|
||||
|
||||
|
||||
-- Sets up a trigger for the given table to automatically set a column called
|
||||
-- `updated_at` whenever the row is modified (unless `updated_at` was included
|
||||
-- in the modified columns)
|
||||
--
|
||||
-- # Example
|
||||
--
|
||||
-- ```sql
|
||||
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
|
||||
--
|
||||
-- SELECT diesel_manage_updated_at('users');
|
||||
-- ```
|
||||
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
|
||||
BEGIN
|
||||
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
|
||||
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
|
||||
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
|
||||
BEGIN
|
||||
IF (
|
||||
NEW IS DISTINCT FROM OLD AND
|
||||
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
|
||||
) THEN
|
||||
NEW.updated_at := current_timestamp;
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
@@ -1,11 +0,0 @@
|
||||
-- This file should undo anything in `up.sql`
|
||||
-- This file should undo anything in `up.sql`
|
||||
DROP TABLE IF EXISTS "categories";
|
||||
DROP TABLE IF EXISTS "tags";
|
||||
DROP TABLE IF EXISTS "books";
|
||||
DROP TABLE IF EXISTS "transactions";
|
||||
DROP TABLE IF EXISTS "transaction_tag_rels";
|
||||
DROP TABLE IF EXISTS "accounts";
|
||||
DROP TABLE IF EXISTS "amounts";
|
||||
|
||||
DROP TABLE IF EXISTS "users";
|
||||
@@ -1,89 +0,0 @@
|
||||
-- Your SQL goes here
|
||||
-- Your SQL goes here
|
||||
CREATE TABLE "categories" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"book_id" BIGINT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"level" INT NOT NULL DEFAULT 0,
|
||||
"parent_category_id" BIGINT NOT NULL DEFAULT 0,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "tags" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"book_id" BIGINT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"level" INT NOT NULL DEFAULT 0,
|
||||
"parent_tag_id" BIGINT NOT NULL DEFAULT 0,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "books" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "transactions" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"book_id" BIGINT NOT NULL,
|
||||
"description" TEXT NOT NULL,
|
||||
"category_id" BIGINT NOT NULL,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"time" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT current_timestamp,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "transaction_tag_rels" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"transaction_id" BIGINT NOT NULL,
|
||||
"tag_id" BIGINT NOT NULL,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "accounts" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"name" TEXT NOT NULL,
|
||||
"account_type" BIGINT NOT NULL DEFAULT 0,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "amounts" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"uid" BIGINT NOT NULL,
|
||||
"account_id" BIGINT NOT NULL,
|
||||
"transaction_id" BIGINT NOT NULL,
|
||||
"value" BIGINT NOT NULL DEFAULT 0,
|
||||
"expo" BIGINT NOT NULL DEFAULT 5,
|
||||
"currency" TEXT NOT NULL DEFAULT '',
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
|
||||
CREATE TABLE "users" (
|
||||
"id" BIGSERIAL PRIMARY KEY,
|
||||
"username" TEXT NOT NULL UNIQUE,
|
||||
"password" TEXT NOT NULL,
|
||||
"mail" TEXT NOT NULL,
|
||||
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
|
||||
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
|
||||
);
|
||||
62
src/api/account.rs
Normal file
62
src/api/account.rs
Normal file
@@ -0,0 +1,62 @@
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::account::{
|
||||
ActiveModel as AccountActiveModel, Column as AccountColumn, Model as AccountModel,
|
||||
};
|
||||
use crate::model::db::prelude::Account as AccountPrelude;
|
||||
use crate::model::http_body::account::{AccountReq, AccountResp};
|
||||
use crate::model::http_body::common::SimpleResponse;
|
||||
use crate::AppState;
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::{ActiveModelTrait, DbErr, Iden, Set};
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update", post(update_account_handler))
|
||||
.route("/{id}", get(get_account_by_id_handler))
|
||||
.route(
|
||||
"/",
|
||||
post(create_account_handler).get(get_all_accounts_handler),
|
||||
)
|
||||
}
|
||||
|
||||
async fn update_account_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<AccountReq>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let mut active_model: AccountActiveModel = AccountPrelude::find_by_id(id)
|
||||
.filter(AccountColumn::Uid.eq(uid))
|
||||
.filter(AccountColumn::IsDeleted.eq(false))
|
||||
.one(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
match payload.name {
|
||||
Some(n) => {
|
||||
active_model.name = Set(n);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
active_model.updated_at = Set(Local::now().naive_utc());
|
||||
|
||||
active_model.update(&state.conn).await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let resp = SimpleResponse{
|
||||
code: 0,
|
||||
message: "".to_string()
|
||||
};
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
async fn get_account_by_id_handler() {}
|
||||
|
||||
async fn create_account_handler() {}
|
||||
|
||||
async fn get_all_accounts_handler() {}
|
||||
171
src/api/book.rs
Normal file
171
src/api/book.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::book::ActiveModel as BookActiveModel;
|
||||
use crate::model::db::book::Column as BookColumn;
|
||||
use crate::model::db::book::Model as BookModel;
|
||||
use crate::model::db::prelude::Book;
|
||||
use crate::model::http_body::book;
|
||||
use crate::model::http_body::book::{BookInfo, BookResp};
|
||||
use crate::model::http_body::common::SimpleResponse;
|
||||
use crate::AppState;
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use sea_orm::{ColumnTrait};
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update",post(update_book_handler))
|
||||
.route("/{id}",get(get_book_by_id_handler))
|
||||
.route("/", post(create_book_handler).get(get_all_books_handler))
|
||||
}
|
||||
|
||||
// handlers
|
||||
//
|
||||
#[debug_handler]
|
||||
async fn get_all_books_handler(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<book::BookResp>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let all_books = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let mut books: Vec<BookResp> = Vec::new();
|
||||
for b in all_books {
|
||||
let book_resp = BookResp {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
books.push(book_resp);
|
||||
}
|
||||
Ok(Json(books))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_book_by_id_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<BookResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let book_query = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.filter(BookColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let book_resp: BookResp;
|
||||
match book_query {
|
||||
Some(b) => {
|
||||
book_resp = BookResp {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
return Err((StatusCode::NOT_FOUND, "not_found".to_string()));
|
||||
}
|
||||
}
|
||||
Ok(Json(book_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn create_book_handler(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BookInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let book = BookActiveModel {
|
||||
name: Set(payload.name.clone().to_owned()),
|
||||
uid: Set(uid.to_owned()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let res = Book::insert(book).exec(&state.conn).await;
|
||||
let mut err_code: i64 = 0;
|
||||
let mut msg: String;
|
||||
match res {
|
||||
Ok(_) => {
|
||||
err_code = 0;
|
||||
msg = "ok".to_owned();
|
||||
}
|
||||
Err(e) => {
|
||||
err_code = 0;
|
||||
msg = e.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
let resp = SimpleResponse {
|
||||
code: err_code,
|
||||
message: msg,
|
||||
};
|
||||
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn update_book_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BookInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let exist_book = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.filter(BookColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let book: BookModel;
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "ok".to_owned(),
|
||||
};
|
||||
match exist_book {
|
||||
Ok(b) => match b {
|
||||
Some(bk) => {
|
||||
book = bk;
|
||||
}
|
||||
_ => return Err((StatusCode::NOT_FOUND, "not_found".to_string())),
|
||||
},
|
||||
Err(_) => {
|
||||
resp.code = 1;
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"connection_error".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let mut book_active_model: BookActiveModel = book.into();
|
||||
book_active_model.name = Set(payload.name.clone());
|
||||
book_active_model.updated_at = Set(Local::now().naive_utc());
|
||||
let update_res = book_active_model.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
resp.message = "ok".to_owned();
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"book_update_fail".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(Json(resp))
|
||||
}
|
||||
198
src/api/book_test.rs
Normal file
198
src/api/book_test.rs
Normal file
@@ -0,0 +1,198 @@
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use axum::{
|
||||
// http::{Request, StatusCode},
|
||||
// Router,
|
||||
// routing::{get, put},
|
||||
// body::Body,
|
||||
// };
|
||||
// use sea_orm::{
|
||||
// MockDatabase, MockExecResult, DatabaseConnection, DatabaseTransaction,
|
||||
// entity::prelude::*,
|
||||
// QueryFilter, Condition, DbErr, EntityTrait,
|
||||
// };
|
||||
// use serde_json::{json, Value};
|
||||
// use tower::ServiceExt;
|
||||
// use std::sync::Arc;
|
||||
//
|
||||
// // 模拟 Book 实体
|
||||
// #[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
|
||||
// #[sea_orm(table_name = "books")]
|
||||
// pub struct Model {
|
||||
// #[sea_orm(primary_key)]
|
||||
// pub id: i32,
|
||||
// pub title: String,
|
||||
// pub author: String,
|
||||
// }
|
||||
//
|
||||
// #[derive(Copy, Clone, Debug, EnumIter)]
|
||||
// pub enum Relation {}
|
||||
//
|
||||
// impl Related<super::book::Entity> for Entity {
|
||||
// fn to() -> RelationDef {
|
||||
// panic!("No relations defined")
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// // 创建测试用的 Router
|
||||
// async fn setup_router(db: DatabaseConnection) -> Router {
|
||||
// Router::new()
|
||||
// .route("/books/:id", get(get_book_by_id).put(update_book_by_id))
|
||||
// .route("/books", get(get_all_book))
|
||||
// .with_state(Arc::new(db))
|
||||
// }
|
||||
//
|
||||
// // 测试 get_book_by_id
|
||||
// #[tokio::test]
|
||||
// async fn test_get_book_by_id() {
|
||||
// // 设置模拟数据库
|
||||
// let db = MockDatabase::new(DatabaseBackend::Postgres)
|
||||
// .append_query_results(vec![vec![Model {
|
||||
// id: 1,
|
||||
// title: "Test Book".to_string(),
|
||||
// author: "Test Author".to_string(),
|
||||
// }]])
|
||||
// .into_connection();
|
||||
//
|
||||
// let app = setup_router(db).await;
|
||||
//
|
||||
// // 构造请求
|
||||
// let request = Request::builder()
|
||||
// .uri("/books/1")
|
||||
// .method("GET")
|
||||
// .body(Body::empty())
|
||||
// .unwrap();
|
||||
//
|
||||
// // 发送请求
|
||||
// let response = app.oneshot(request).await.unwrap();
|
||||
// assert_eq!(response.status(), StatusCode::OK);
|
||||
//
|
||||
// // 解析响应
|
||||
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
|
||||
// let body: Value = serde_json::from_slice(&body).unwrap();
|
||||
// assert_eq!(
|
||||
// body,
|
||||
// json!({
|
||||
// "id": 1,
|
||||
// "title": "Test Book",
|
||||
// "author": "Test Author"
|
||||
// })
|
||||
// );
|
||||
// }
|
||||
//
|
||||
// // 测试 get_book_by_id 未找到
|
||||
// #[tokio::test]
|
||||
// async fn test_get_book_by_id_not_found() {
|
||||
// let db = MockDatabase::new(DatabaseBackend::Postgres)
|
||||
// .append_query_results(vec![vec![] as Vec<Model>])
|
||||
// .into_connection();
|
||||
//
|
||||
// let app = setup_router(db).await;
|
||||
//
|
||||
// let request = Request::builder()
|
||||
// .uri("/books/999")
|
||||
// .method("GET")
|
||||
// .body(Body::empty())
|
||||
// .unwrap();
|
||||
//
|
||||
// let response = app.oneshot(request).await.unwrap();
|
||||
// assert_eq!(response.status(), StatusCode::NOT_FOUND);
|
||||
// }
|
||||
//
|
||||
// // 测试 update_book_by_id
|
||||
// #[tokio::test]
|
||||
// async fn test_update_book_by_id() {
|
||||
// let db = MockDatabase::new(DatabaseBackend::Postgres)
|
||||
// .append_query_results(vec![vec![Model {
|
||||
// id: 1,
|
||||
// title: "Updated Book".to_string(),
|
||||
// author: "Updated Author".to_string(),
|
||||
// }]])
|
||||
// .append_exec_results(vec![MockExecResult {
|
||||
// last_insert_id: 1,
|
||||
// rows_affected: 1,
|
||||
// }])
|
||||
// .into_connection();
|
||||
//
|
||||
// let app = setup_router(db).await;
|
||||
//
|
||||
// // 构造请求
|
||||
// let request = Request::builder()
|
||||
// .uri("/books/1")
|
||||
// .method("PUT")
|
||||
// .header("Content-Type", "application/json")
|
||||
// .body(Body::from(
|
||||
// json!({
|
||||
// "title": "Updated Book",
|
||||
// "author": "Updated Author"
|
||||
// })
|
||||
// .to_string(),
|
||||
// ))
|
||||
// .unwrap();
|
||||
//
|
||||
// // 发送请求
|
||||
// let response = app.oneshot(request).await.unwrap();
|
||||
// assert_eq!(response.status(), StatusCode::OK);
|
||||
//
|
||||
// // 解析响应
|
||||
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
|
||||
// let body: Value = serde_json::from_slice(&body).unwrap();
|
||||
// assert_eq!(
|
||||
// body,
|
||||
// json!({
|
||||
// "id": 1,
|
||||
// "title": "Updated Book",
|
||||
// "author": "Updated Author"
|
||||
// })
|
||||
// );
|
||||
// }
|
||||
//
|
||||
// // 测试 get_all_book
|
||||
// #[tokio::test]
|
||||
// async fn test_get_all_book() {
|
||||
// let db = MockDatabase::new(DatabaseBackend::Postgres)
|
||||
// .append_query_results(vec![vec![
|
||||
// Model {
|
||||
// id: 1,
|
||||
// title: "Book 1".to_string(),
|
||||
// author: "Author 1".to_string(),
|
||||
// },
|
||||
// Model {
|
||||
// id: 2,
|
||||
// title: "Book 2".to_string(),
|
||||
// author: "Author 2".to_string(),
|
||||
// },
|
||||
// ]])
|
||||
// .into_connection();
|
||||
//
|
||||
// let app = setup_router(db).await;
|
||||
//
|
||||
// let request = Request::builder()
|
||||
// .uri("/books")
|
||||
// .method("GET")
|
||||
// .body(Body::empty())
|
||||
// .unwrap();
|
||||
//
|
||||
// let response = app.oneshot(request).await.unwrap();
|
||||
// assert_eq!(response.status(), StatusCode::OK);
|
||||
//
|
||||
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
|
||||
// let body: Value = serde_json::from_slice(&body).unwrap();
|
||||
// assert_eq!(
|
||||
// body,
|
||||
// json!([
|
||||
// {
|
||||
// "id": 1,
|
||||
// "title": "Book 1",
|
||||
// "author": "Author 1"
|
||||
// },
|
||||
// {
|
||||
// "id": 2,
|
||||
// "title": "Book 2",
|
||||
// "author": "Author 2"
|
||||
// }
|
||||
// ])
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
217
src/api/category.rs
Normal file
217
src/api/category.rs
Normal file
@@ -0,0 +1,217 @@
|
||||
use crate::api::category;
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::prelude::Category;
|
||||
use crate::model::db::{
|
||||
category::ActiveModel as CategoryActiveModel, category::Column as CategoryColumn,
|
||||
category::Model as CategoryModel,
|
||||
};
|
||||
use crate::model::http_body::category::CategoryInfo;
|
||||
use crate::model::http_body::common::{OptionalI64, SimpleResponse};
|
||||
use crate::AppState;
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use axum_macros::debug_handler;
|
||||
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::QueryFilter;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use tokio::join;
|
||||
|
||||
pub fn get_nested_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update", post(update_category_by_id))
|
||||
.route("/{id}", get(get_category_by_id))
|
||||
.route("/", post(create_category).get(get_all_categories))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_all_categories(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<CategoryInfo>>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let categories_query = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await;
|
||||
let category_models = match categories_query {
|
||||
Ok(categories) => categories,
|
||||
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
};
|
||||
let mut category_resp: Vec<CategoryInfo> = Vec::new();
|
||||
for category in category_models {
|
||||
let category_info = CategoryInfo {
|
||||
id: category.id.into(),
|
||||
name: category.name,
|
||||
parent_id: category.parent_id.into(),
|
||||
};
|
||||
category_resp.push(category_info);
|
||||
}
|
||||
Ok(Json(category_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn create_category(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
payload: Json<CategoryInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let parent_id: i64 = match payload.parent_id {
|
||||
OptionalI64(pid_opt) => pid_opt.unwrap_or_else(|| 0),
|
||||
};
|
||||
let category_active_model = CategoryActiveModel {
|
||||
name: Set(payload.name.clone()),
|
||||
uid: Set(uid),
|
||||
parent_id: Set(parent_id),
|
||||
..Default::default()
|
||||
};
|
||||
let insert_res = Category::insert(category_active_model)
|
||||
.exec(&state.conn)
|
||||
.await;
|
||||
match insert_res {
|
||||
Ok(_) => Ok(Json(SimpleResponse {
|
||||
code: 0,
|
||||
message: "success".to_string(),
|
||||
})),
|
||||
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_category_by_id(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<CategoryInfo>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let category_query_res = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let category_query: CategoryModel = match category_query_res {
|
||||
Ok(r) => match r {
|
||||
Some(res) => res,
|
||||
None => return Err((StatusCode::NOT_FOUND, "not found".to_string())),
|
||||
},
|
||||
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
};
|
||||
let category_resp = CategoryInfo {
|
||||
id: category_query.id.into(),
|
||||
name: category_query.name.clone(),
|
||||
parent_id: category_query.parent_id.into(),
|
||||
};
|
||||
Ok(Json(category_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn update_category_by_id(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
payload: Json<CategoryInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let mut parent_category_required = false;
|
||||
let mut parent_id: i64 = 0;
|
||||
let category_query = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::Id.eq(id))
|
||||
.one(&state.conn);
|
||||
let parent_query = match payload.parent_id {
|
||||
OptionalI64(Some(cid)) => {
|
||||
if cid > 0 {
|
||||
parent_category_required = true;
|
||||
parent_id = cid;
|
||||
Some(
|
||||
Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::ParentId.eq(cid))
|
||||
.one(&state.conn),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
OptionalI64(None) => None,
|
||||
};
|
||||
|
||||
let (category_result, parent_result) = if let Some(parent_query) = parent_query {
|
||||
// 并发执行两个查询
|
||||
let (category, parent) = join!(category_query, parent_query);
|
||||
// 处理查询结果
|
||||
(
|
||||
category.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
parent.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
)
|
||||
} else {
|
||||
// 只查询 category
|
||||
(
|
||||
category_query.await.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
None,
|
||||
)
|
||||
};
|
||||
|
||||
let category = match category_result {
|
||||
Some(category) => {
|
||||
category
|
||||
}
|
||||
None => {
|
||||
return Err((StatusCode::NOT_FOUND, "Category not found".to_string()));
|
||||
}
|
||||
};
|
||||
let parent_category_valid = match parent_result {
|
||||
Some(_) => { true }
|
||||
None => { false }
|
||||
};
|
||||
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "success".to_string(),
|
||||
};
|
||||
let mut category_active_model : CategoryActiveModel = category.into();
|
||||
category_active_model.name = Set(payload.name.clone());
|
||||
category_active_model.updated_at = Set(Local::now().naive_utc());
|
||||
if parent_category_required && parent_id > 0 {
|
||||
category_active_model.parent_id = Set(parent_id.into());
|
||||
}
|
||||
|
||||
let update_res = category_active_model.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
resp.message = "ok".to_owned();
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"category update failed".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if parent_category_required && !parent_category_valid {
|
||||
resp.code = 1;
|
||||
resp.message = "Parent category not found".to_string();
|
||||
return Err((StatusCode::NOT_FOUND, "Parent category not found".to_string()));
|
||||
}
|
||||
Ok(Json(resp))
|
||||
}
|
||||
5
src/api/mod.rs
Normal file
5
src/api/mod.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
pub mod book;
|
||||
pub mod category;
|
||||
pub mod tag;
|
||||
pub mod transaction;
|
||||
pub mod account;
|
||||
170
src/api/tag.rs
Normal file
170
src/api/tag.rs
Normal file
@@ -0,0 +1,170 @@
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::tag::ActiveModel as TagActiveModel;
|
||||
use crate::model::db::tag::Column as TagColumn;
|
||||
use crate::model::db::tag::Model as TagModel;
|
||||
use crate::model::db::prelude::Tag;
|
||||
use crate::model::http_body::tag::{TagInfo, TagResp};
|
||||
use crate::model::http_body::common::SimpleResponse;
|
||||
use crate::AppState;
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use sea_orm::{ColumnTrait};
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update",post(update_tag))
|
||||
.route("/{id}",get(get_tag_by_id))
|
||||
.route("/", post(create_tag).get(get_all_tags))
|
||||
}
|
||||
|
||||
// handlers
|
||||
//
|
||||
#[debug_handler]
|
||||
async fn get_all_tags(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<TagResp>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let all_tags = Tag::find()
|
||||
.filter(TagColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let mut tags: Vec<TagResp> = Vec::new();
|
||||
for b in all_tags {
|
||||
let tag_resp = TagResp {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
tags.push(tag_resp);
|
||||
}
|
||||
Ok(Json(tags))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_tag_by_id(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<TagResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let tag_query = Tag::find()
|
||||
.filter(TagColumn::Uid.eq(uid))
|
||||
.filter(TagColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let tag_resp: TagResp;
|
||||
match tag_query {
|
||||
Some(b) => {
|
||||
tag_resp = TagResp {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
return Err((StatusCode::NOT_FOUND, "not_found".to_string()));
|
||||
}
|
||||
}
|
||||
Ok(Json(tag_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn create_tag(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<TagInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let tag = TagActiveModel {
|
||||
name: Set(payload.name.clone().to_owned()),
|
||||
uid: Set(uid.to_owned()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let res = Tag::insert(tag).exec(&state.conn).await;
|
||||
let mut err_code: i64 = 0;
|
||||
let mut msg: String;
|
||||
match res {
|
||||
Ok(_) => {
|
||||
err_code = 0;
|
||||
msg = "ok".to_owned();
|
||||
}
|
||||
Err(e) => {
|
||||
err_code = 0;
|
||||
msg = e.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
let resp = SimpleResponse {
|
||||
code: err_code,
|
||||
message: msg,
|
||||
};
|
||||
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn update_tag(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<TagInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let exist_tag = Tag::find()
|
||||
.filter(TagColumn::Uid.eq(uid))
|
||||
.filter(TagColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let tag: TagModel;
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "ok".to_owned(),
|
||||
};
|
||||
match exist_tag {
|
||||
Ok(b) => match b {
|
||||
Some(bk) => {
|
||||
tag = bk;
|
||||
}
|
||||
_ => return Err((StatusCode::NOT_FOUND, "not_found".to_string())),
|
||||
},
|
||||
Err(_) => {
|
||||
resp.code = 1;
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"connection_error".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let mut tag_active_model: TagActiveModel = tag.into();
|
||||
tag_active_model.name = Set(payload.name.clone());
|
||||
tag_active_model.updated_at = Set(Local::now().naive_utc());
|
||||
let update_res = tag_active_model.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
resp.message = "ok".to_owned();
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"tag_update_fail".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(Json(resp))
|
||||
}
|
||||
217
src/api/transaction.rs
Normal file
217
src/api/transaction.rs
Normal file
@@ -0,0 +1,217 @@
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::category::Column as CategoryColumn;
|
||||
use crate::model::db::prelude::Category as CategoryPrelude;
|
||||
use crate::model::db::prelude::Transaction;
|
||||
use crate::model::db::transaction::{
|
||||
ActiveModel as TransactionActiveModel, Column as TransactionColumn, Model as TransactionModel,
|
||||
};
|
||||
use crate::model::http_body;
|
||||
use crate::model::http_body::book::BookInfo;
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::model::db::prelude::Tag as TagPrelude;
|
||||
use crate::model::db::tag::{
|
||||
ActiveModel as TagActiveModel, Column as TagColumn, Model as TagModel,
|
||||
};
|
||||
use crate::model::http_body::category::CategoryResp;
|
||||
use crate::model::http_body::common::SimpleResponse;
|
||||
use crate::model::http_body::transaction::{TransactionReq, TransactionResp};
|
||||
use crate::AppState;
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use axum_macros::debug_handler;
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::{ColumnTrait, DatabaseConnection};
|
||||
use sea_orm::QueryFilter;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use serde_json::error::Category;
|
||||
use std::ptr::null;
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update", post(update_transaction_handler))
|
||||
.route("/{id}", get(get_transaction_by_id_handler))
|
||||
.route(
|
||||
"/",
|
||||
post(create_transaction_handler).get(get_all_transactions_handler),
|
||||
)
|
||||
}
|
||||
|
||||
async fn update_transaction_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<TransactionReq>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid;
|
||||
let exist_transaction = Transaction::find()
|
||||
.filter(TransactionColumn::Id.eq(id))
|
||||
.filter(TransactionColumn::Uid.eq(uid))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "".to_string(),
|
||||
};
|
||||
let transaction: TransactionModel;
|
||||
match exist_transaction {
|
||||
Err(_) => {
|
||||
resp.code = 1;
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"connection_error".to_string(),
|
||||
));
|
||||
}
|
||||
Ok(tra) => match tra {
|
||||
Some(tr) => {
|
||||
transaction = tr;
|
||||
}
|
||||
_ => return Err((StatusCode::NOT_FOUND, "Transaction not found".to_string())),
|
||||
},
|
||||
}
|
||||
let mut tr_active: TransactionActiveModel = transaction.into();
|
||||
match payload.description {
|
||||
None => {}
|
||||
Some(input_desc) => {
|
||||
tr_active.description = Set(input_desc);
|
||||
}
|
||||
}
|
||||
// TODO category
|
||||
let new_category_id: i64 = match payload.category_id {
|
||||
None => {
|
||||
return Err((
|
||||
StatusCode::BAD_REQUEST,
|
||||
"category_id is not valid".to_string(),
|
||||
))
|
||||
}
|
||||
Some(cid_string) => match cid_string.parse::<i64>() {
|
||||
Ok(cid) => cid,
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::BAD_REQUEST,
|
||||
"category_id is not valid".to_string(),
|
||||
))
|
||||
}
|
||||
},
|
||||
};
|
||||
let new_category_id_exist = CategoryPrelude::find()
|
||||
.filter(CategoryColumn::Id.eq(new_category_id))
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await;
|
||||
match new_category_id_exist {
|
||||
Ok(_) => {}
|
||||
Err(_) => {
|
||||
return Err((StatusCode::NOT_FOUND, "category_id not found".to_string()));
|
||||
}
|
||||
}
|
||||
|
||||
// TODO tags
|
||||
let tag_exist = check_tags_exist(&state.conn, payload.tags).await;
|
||||
let all_tag_exist: bool;
|
||||
match tag_exist {
|
||||
Ok(tag_res) => {
|
||||
all_tag_exist = tag_res.values().all(|&exists| exists);
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((StatusCode::NOT_FOUND, "tag not found".to_string()));
|
||||
}
|
||||
}
|
||||
if !all_tag_exist {
|
||||
return Err((StatusCode::NOT_FOUND, "tag not found".to_string()));
|
||||
}
|
||||
|
||||
// TODO amounts
|
||||
|
||||
// Update
|
||||
tr_active.updated_at = Set(Local::now().naive_utc());
|
||||
let update_res = tr_active.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"transaction_update_failed".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
async fn create_transaction_handler() {}
|
||||
|
||||
async fn get_transaction_by_id_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<TransactionResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let transaction_query = Transaction::find()
|
||||
.filter(TransactionColumn::Uid.eq(id))
|
||||
.filter(TransactionColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let response: TransactionResp;
|
||||
match transaction_query {
|
||||
None => {
|
||||
return Err((StatusCode::NOT_FOUND, "Transaction not found".to_string()));
|
||||
}
|
||||
Some(x) => {
|
||||
response = TransactionResp {
|
||||
id: x.id,
|
||||
description: x.description,
|
||||
category: CategoryResp {
|
||||
id: 0,
|
||||
name: "".to_string(),
|
||||
parent_id: 0,
|
||||
},
|
||||
tags: vec![],
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
Ok(Json(response))
|
||||
}
|
||||
|
||||
async fn get_all_transactions_handler() {}
|
||||
|
||||
// 批量检查 TagModel 是否存在
|
||||
async fn check_tags_exist(
|
||||
connection: &DatabaseConnection,
|
||||
ids: Vec<String>,
|
||||
) -> Result<HashMap<i64, bool>, String> {
|
||||
// 将 Vec<String> 转换为 Vec<i64>,并处理可能的转换错误
|
||||
let ids_i64: Vec<i64> = ids
|
||||
.into_iter()
|
||||
.filter_map(|id| id.parse::<i64>().ok())
|
||||
.collect();
|
||||
|
||||
if ids_i64.is_empty() {
|
||||
return Ok(HashMap::new());
|
||||
}
|
||||
|
||||
// 构建 IN 查询条件
|
||||
let condition = Condition::any().add(TagColumn::Id.is_in(ids_i64.clone()));
|
||||
|
||||
// 执行批量查询,获取存在的 TagModel
|
||||
let found_tags = TagPrelude::find()
|
||||
.filter(condition)
|
||||
.all(connection)
|
||||
.await
|
||||
.map_err(|e| format!("Database error: {}", e))?;
|
||||
|
||||
// 创建 HashMap 记录每个 ID 是否存在
|
||||
let mut result = HashMap::new();
|
||||
for id in ids_i64 {
|
||||
result.insert(id, found_tags.iter().any(|tag| tag.id == id));
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
1
src/dal/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod transaction;
0
src/dal/transaction.rs
Normal file
1
src/dao/mod.rs
Normal file
@@ -0,0 +1 @@
pub mod transaction_tag_rel;
130
src/dao/transaction_tag_rel.rs
Normal file
@@ -0,0 +1,130 @@
use sea_orm::{ActiveModelTrait, ColumnTrait, DatabaseConnection, DbErr, EntityTrait, QueryFilter, Set};
use sea_orm::sqlx::types::chrono::Local;
use crate::model::db::prelude::TransactionTagRel as TransactionTagRelPrelude;
use crate::model::db::transaction_tag_rel::{
    ActiveModel as TransactionTagRelActiveModel,
    Column as TransactionTagRelColumn,
    Model as TransactionTagRelModel,
};

// DAO struct for TransactionTagRel
|
||||
pub struct TransactionTagRelDAO {
|
||||
db: DatabaseConnection,
|
||||
}
|
||||
|
||||
impl TransactionTagRelDAO {
|
||||
// Constructor
|
||||
pub fn new(db: DatabaseConnection) -> Self {
|
||||
Self { db }
|
||||
}
|
||||
|
||||
// Create a new TransactionTagRel
|
||||
pub async fn create(
|
||||
&self,
|
||||
uid: i64,
|
||||
transaction_id: i64,
|
||||
tag_id: i64,
|
||||
) -> Result<TransactionTagRelModel, DbErr> {
|
||||
let active_model = TransactionTagRelActiveModel {
|
||||
uid: Set(uid),
|
||||
transaction_id: Set(transaction_id),
|
||||
tag_id: Set(tag_id),
|
||||
is_deleted: Set(false),
|
||||
// created_at: Set(Utc::now()),
|
||||
// updated_at: Set(Utc::now()),
|
||||
..Default::default() // id is auto-incremented
|
||||
};
|
||||
|
||||
active_model.insert(&self.db).await
|
||||
}
|
||||
|
||||
// Find by ID
|
||||
pub async fn find_by_id(&self, id: i64, uid:i64) -> Result<Option<TransactionTagRelModel>, DbErr> {
|
||||
TransactionTagRelPrelude::find_by_id(id)
|
||||
.filter(TransactionTagRelColumn::Uid.eq(uid))
|
||||
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
|
||||
.one(&self.db)
|
||||
.await
|
||||
}
|
||||
|
||||
// Find all by transaction_id
|
||||
pub async fn find_by_transaction_id(
|
||||
&self,
|
||||
transaction_id: i64,
|
||||
uid: i64,
|
||||
) -> Result<Vec<TransactionTagRelModel>, DbErr> {
|
||||
TransactionTagRelPrelude::find()
|
||||
.filter(TransactionTagRelColumn::Uid.eq(uid))
|
||||
.filter(TransactionTagRelColumn::TransactionId.eq(transaction_id))
|
||||
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
|
||||
.all(&self.db)
|
||||
.await
|
||||
}
|
||||
|
||||
// Find all by tag_id
|
||||
pub async fn find_by_tag_id(
|
||||
&self,
|
||||
tag_id: i64,
|
||||
uid: i64,
|
||||
) -> Result<Vec<TransactionTagRelModel>, DbErr> {
|
||||
TransactionTagRelPrelude::find()
|
||||
.filter(TransactionTagRelColumn::Uid.eq(uid))
|
||||
.filter(TransactionTagRelColumn::TagId.eq(tag_id))
|
||||
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
|
||||
.all(&self.db)
|
||||
.await
|
||||
}
|
||||
|
||||
// Update a TransactionTagRel
|
||||
pub async fn update(
|
||||
&self,
|
||||
id: i64,
|
||||
uid: Option<i64>,
|
||||
transaction_id: Option<i64>,
|
||||
tag_id: Option<i64>,
|
||||
) -> Result<TransactionTagRelModel, DbErr> {
|
||||
let mut active_model: TransactionTagRelActiveModel = TransactionTagRelPrelude::find_by_id(id)
|
||||
.filter(TransactionTagRelColumn::Uid.eq(uid))
|
||||
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
|
||||
.one(&self.db)
|
||||
.await?
|
||||
.ok_or(DbErr::RecordNotFound("TransactionTagRel not found".into()))?
|
||||
.into();
|
||||
|
||||
if let Some(uid) = uid {
|
||||
active_model.uid = Set(uid);
|
||||
}
|
||||
if let Some(transaction_id) = transaction_id {
|
||||
active_model.transaction_id = Set(transaction_id);
|
||||
}
|
||||
if let Some(tag_id) = tag_id {
|
||||
active_model.tag_id = Set(tag_id);
|
||||
}
|
||||
active_model.updated_at = Set(Local::now().naive_utc());
|
||||
|
||||
active_model.update(&self.db).await
|
||||
}
|
||||
|
||||
// Soft delete (set is_deleted = true)
|
||||
pub async fn soft_delete(&self, id: i64, uid: i64) -> Result<(), DbErr> {
|
||||
let mut active_model: TransactionTagRelActiveModel = TransactionTagRelPrelude::find_by_id(id)
|
||||
.filter(TransactionTagRelColumn::Uid.eq(uid))
|
||||
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
|
||||
.one(&self.db)
|
||||
.await?
|
||||
.ok_or(DbErr::RecordNotFound("TransactionTagRel not found".into()))?
|
||||
.into();
|
||||
|
||||
active_model.is_deleted = Set(true);
|
||||
active_model.updated_at = Set(Local::now().naive_utc());
|
||||
active_model.update(&self.db).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    // Hard delete (optional, use with caution).
    // Note: delete_by_id is keyed by id alone, so `uid` is not enforced here;
    // callers should verify ownership (e.g. via find_by_id) before calling this.
    pub async fn hard_delete(&self, id: i64, _uid: i64) -> Result<(), DbErr> {
        TransactionTagRelPrelude::delete_by_id(id)
            .exec(&self.db)
            .await?;
        Ok(())
    }
|
||||
}
|
||||
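A short usage sketch for the DAO above (hypothetical caller; `conn` is assumed to be the sea_orm::DatabaseConnection held in AppState, and the ids are illustrative):

    // Attach tag 7 to transaction 42 for user 1, then list that transaction's tag links.
    let dao = TransactionTagRelDAO::new(conn.clone());
    let rel = dao.create(1, 42, 7).await?;
    let links = dao.find_by_transaction_id(42, 1).await?;
    // Soft delete keeps the row but flips is_deleted, so later finds skip it.
    dao.soft_delete(rel.id, 1).await?;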
@@ -1,150 +0,0 @@
|
||||
// use std::sync::Arc;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
use diesel::prelude::*;
|
||||
// use diesel::update;
|
||||
use serde::{Deserialize, Serialize};
|
||||
// use serde_json::to_string;
|
||||
use crate::model::db_model;
|
||||
use crate::model::schema;
|
||||
use crate::util;
|
||||
use crate::util::req::CommonResp;
|
||||
use chrono::prelude::*;
|
||||
use tracing::info;
|
||||
use crate::middleware::auth;
|
||||
use crate::middleware::auth::Claims;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CreateAccountRequest {
|
||||
name: String,
|
||||
account_type: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CreateAccountResponse {
|
||||
id: i64,
|
||||
name: String,
|
||||
account_type: i64,
|
||||
}
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/", post(create_account).get(get_all_accounts))
|
||||
.route("/:id", post(update_account).get(get_account))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_account(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateAccountRequest>,
|
||||
) -> Result<Json<db_model::Account>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let new_account = db_model::AccountForm {
|
||||
name: payload.name,
|
||||
account_type: payload.account_type,
|
||||
uid: uid,
|
||||
};
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::insert_into(schema::accounts::table)
|
||||
.values(&new_account)
|
||||
.returning(db_model::Account::as_returning())
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn update_account(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateAccountRequest>,
|
||||
) -> Result<Json<CommonResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let now = Utc::now().naive_utc();
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::update(schema::accounts::table)
|
||||
.filter(schema::accounts::id.eq(id))
|
||||
.filter(schema::accounts::uid.eq(uid))
|
||||
.set((
|
||||
schema::accounts::name.eq(payload.name),
|
||||
schema::accounts::account_type.eq(payload.account_type),
|
||||
schema::accounts::update_at.eq(now),
|
||||
))
|
||||
.execute(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
let resp = util::req::CommonResp { code: 0 };
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
pub async fn get_account(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<db_model::Account>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::accounts::table
|
||||
.filter(schema::accounts::id.eq(id))
|
||||
.filter(schema::accounts::uid.eq(uid))
|
||||
.select(db_model::Account::as_select())
|
||||
.limit(1)
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_accounts(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<db_model::Account>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::accounts::table
|
||||
.filter(schema::accounts::uid.eq(uid))
|
||||
.select(db_model::Account::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
@@ -1,145 +0,0 @@
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
use diesel::prelude::*;
|
||||
// use diesel::update;
|
||||
use serde::{Deserialize, Serialize};
|
||||
// use serde_json::to_string;
|
||||
use crate::model::db_model;
|
||||
use crate::model::schema;
|
||||
use crate::util;
|
||||
use crate::util::req::CommonResp;
|
||||
use chrono::prelude::*;
|
||||
use tracing::info;
|
||||
use crate::middleware::auth;
|
||||
use crate::middleware::auth::Claims;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CreateBookRequest {
|
||||
name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CreateBookResponse {
|
||||
id: i64,
|
||||
name: String,
|
||||
}
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/", post(create_book).get(get_all_books))
|
||||
.route("/:id", post(update_book).get(get_book))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_book(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateBookRequest>,
|
||||
) -> Result<Json<db_model::Book>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let new_book = db_model::BookForm {
|
||||
name: payload.name,
|
||||
uid,
|
||||
};
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::insert_into(schema::books::table)
|
||||
.values(&new_book)
|
||||
.returning(db_model::Book::as_returning())
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn update_book(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateBookRequest>,
|
||||
) -> Result<Json<CommonResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let now = Utc::now().naive_utc();
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::update(schema::books::table)
|
||||
.filter(schema::books::id.eq(id))
|
||||
.filter(schema::books::uid.eq(uid))
|
||||
.set((
|
||||
schema::books::name.eq(payload.name),
|
||||
schema::books::update_at.eq(now),
|
||||
))
|
||||
.execute(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
let resp = util::req::CommonResp { code: 0 };
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
pub async fn get_book(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<db_model::Book>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::books::table
|
||||
.filter(schema::books::id.eq(id))
|
||||
.filter(schema::books::uid.eq(uid))
|
||||
.select(db_model::Book::as_select())
|
||||
.limit(1)
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_books(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<db_model::Book>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::books::table
|
||||
.filter(schema::books::uid.eq(uid))
|
||||
.select(db_model::Book::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
@@ -1,162 +0,0 @@
|
||||
// use std::sync::Arc;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
use diesel::prelude::*;
|
||||
// use diesel::update;
|
||||
use serde::{Deserialize, Serialize};
|
||||
// use serde_json::to_string;
|
||||
use crate::model::db_model;
|
||||
use crate::model::schema;
|
||||
use crate::util;
|
||||
// use crate::model::schema::categories::dsl::categories;
|
||||
use crate::util::req::CommonResp;
|
||||
use chrono::prelude::*;
|
||||
use tracing::info;
|
||||
use crate::middleware::auth;
|
||||
use crate::middleware::auth::Claims;
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CreateCategoryResponse {
|
||||
id: i64,
|
||||
name: String,
|
||||
level: i32,
|
||||
parent_category_id: i64,
|
||||
book_id: i64,
|
||||
}
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/", post(create_category).get(get_all_categories))
|
||||
.route("/:id", post(update_category).get(get_category))
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CreateCategoryRequest {
|
||||
name: String,
|
||||
level: i32,
|
||||
parent_category_id: i64,
|
||||
book_id: i64,
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_category(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateCategoryRequest>,
|
||||
) -> Result<Json<db_model::Category>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone(); // TODO replace with actual user id.
|
||||
// let ret = CreateCategoryResponse{id: 134132413541, name: "24532452".to_string()};
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let new_category = db_model::CategoryForm {
|
||||
name: payload.name,
|
||||
uid: uid,
|
||||
level: payload.level,
|
||||
parent_category_id: payload.parent_category_id,
|
||||
book_id: payload.book_id,
|
||||
};
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::insert_into(schema::categories::table)
|
||||
.values(&new_category)
|
||||
.returning(db_model::Category::as_returning())
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
// let ret = CreateCategoryResponse{id: res.id, name: res.name};
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn update_category(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateCategoryRequest>,
|
||||
) -> Result<Json<CommonResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone(); // TODO replace with actual user id.
|
||||
// let ret = CreateCategoryResponse{id: 134132413541, name: "24532452".to_string()};
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let now = Utc::now().naive_utc();
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::update(schema::categories::table)
|
||||
.filter(schema::categories::id.eq(id))
|
||||
.filter(schema::categories::uid.eq(uid))
|
||||
.set((
|
||||
schema::categories::name.eq(payload.name),
|
||||
schema::categories::level.eq(payload.level),
|
||||
schema::categories::parent_category_id.eq(payload.parent_category_id),
|
||||
schema::categories::update_at.eq(now),
|
||||
))
|
||||
.execute(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
// let ret = CreateCategoryResponse{id: res.id, name: res.name};
|
||||
let resp = util::req::CommonResp { code: 0 };
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
pub async fn get_category(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<db_model::Category>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::categories::table
|
||||
.filter(schema::categories::id.eq(id))
|
||||
.filter(schema::categories::uid.eq(uid))
|
||||
.select(db_model::Category::as_select())
|
||||
.limit(1)
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_categories(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<db_model::Category>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::categories::table
|
||||
.filter(schema::categories::uid.eq(uid))
|
||||
.select(db_model::Category::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
@@ -1,157 +0,0 @@
|
||||
// use std::sync::Arc;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
use diesel::prelude::*;
|
||||
// use diesel::update;
|
||||
use serde::{Deserialize, Serialize};
|
||||
// use serde_json::to_string;
|
||||
use crate::model::db_model;
|
||||
use crate::model::schema;
|
||||
use crate::util;
|
||||
use crate::util::req::CommonResp;
|
||||
use chrono::prelude::*;
|
||||
use tracing::info;
|
||||
use crate::middleware::auth;
|
||||
use crate::middleware::auth::Claims;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct CreateTagRequest {
|
||||
book_id: i64,
|
||||
name: String,
|
||||
level: i32,
|
||||
parent_tag_id: i64,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CreateTagResponse {
|
||||
id: i64,
|
||||
name: String,
|
||||
book_id: i64,
|
||||
level: i32,
|
||||
parent_tag_id: i64,
|
||||
}
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/", post(create_tag).get(get_all_tags))
|
||||
.route("/:id", post(update_tag).get(get_tag))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_tag(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateTagRequest>,
|
||||
) -> Result<Json<db_model::Tag>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let new_tag = db_model::TagForm {
|
||||
book_id:payload.book_id,
|
||||
name: payload.name,
|
||||
uid: uid,
|
||||
level: payload.level,
|
||||
parent_tag_id: payload.parent_tag_id,
|
||||
};
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::insert_into(schema::tags::table)
|
||||
.values(&new_tag)
|
||||
.returning(db_model::Tag::as_returning())
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn update_tag(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<CreateTagRequest>,
|
||||
) -> Result<Json<CommonResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let now = Utc::now().naive_utc();
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::update(schema::tags::table)
|
||||
.filter(schema::tags::id.eq(id))
|
||||
.filter(schema::tags::uid.eq(uid))
|
||||
.set((
|
||||
schema::tags::name.eq(payload.name),
|
||||
schema::tags::level.eq(payload.level),
|
||||
schema::tags::parent_tag_id.eq(payload.parent_tag_id),
|
||||
schema::tags::update_at.eq(now),
|
||||
))
|
||||
.execute(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
let resp = util::req::CommonResp { code: 0 };
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
pub async fn get_tag(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<db_model::Tag>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::tags::table
|
||||
.filter(schema::tags::id.eq(id))
|
||||
.filter(schema::tags::uid.eq(uid))
|
||||
.select(db_model::Tag::as_select())
|
||||
.limit(1)
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_tags(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<db_model::Tag>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::tags::table
|
||||
.filter(schema::tags::uid.eq(uid))
|
||||
.select(db_model::Tag::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
@@ -1,503 +0,0 @@
|
||||
use axum::extract::Query;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
use diesel::dsl::exists;
|
||||
use diesel::prelude::*;
|
||||
use std::fmt;
|
||||
use std::i64::MAX;
|
||||
use chrono::ParseResult;
|
||||
// use diesel::update;
|
||||
use serde::{Deserialize, Serialize};
|
||||
// use serde_json::to_string;
|
||||
use crate::middleware::auth;
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::{db_model,schema,req};
|
||||
use crate::util;
|
||||
use crate::util::req::CommonResp;
|
||||
use chrono::prelude::*;
|
||||
use tracing::info;
|
||||
use crate::model::req::{GetAmountByTransactionRangeParams, GetAmountParams, MAX_QUERY_LIMIT};
|
||||
|
||||
const PAYMENT_STORE_EXPO: i64 = 5;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SubmitTransactionRequest {
|
||||
description: String,
|
||||
book_id: i64,
|
||||
category_id: i64,
|
||||
tag_ids: Vec<i64>,
|
||||
time: String, // RFC 3339 "2020-04-12T22:10:57+02:00"
|
||||
amounts: Vec<SubmitTransactionAmountRequest>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct SubmitTransactionAmountRequest {
|
||||
account_id: i64,
|
||||
payment: String,
|
||||
expo: i32,
|
||||
currency: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
pub struct CreateTransactionResponse {
|
||||
pub id: i64,
|
||||
pub book_id: i64,
|
||||
pub description: String,
|
||||
pub category_id: i64,
|
||||
pub time: chrono::DateTime<Utc>,
|
||||
pub tag_ids: Vec<i64>,
|
||||
pub amount_ids: Vec<i64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct BatchGetTransactionRequest {
|
||||
pub transaction_ids: Vec<i64>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct BatchGetTransactionAmountRequest {
|
||||
pub transaction_ids: Vec<i64>,
|
||||
}
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/entry/batch_get", post(batch_get_transactions))
|
||||
.route(
|
||||
"/entry",
|
||||
post(create_transaction) // create new transaction entry with amount
|
||||
.get(get_all_transactions),// get all transactions with entry
|
||||
)
|
||||
.route("/entry/:id", get(get_transaction)) // get transaction entry
|
||||
.route("/amount/by_transaction_id", get(get_amounts_by_tid))
|
||||
.route("/amount/batch_get_by_transaction_id", post(batch_get_amounts_by_tid))
|
||||
.route("/amount", get(get_all_amounts_by_tid_range))
|
||||
// .route("/entry/amount/:id", post(update_amount).get(get_amount)) // require query param tid=transaction_id
|
||||
}
|
||||
// implementation, or do something in between.
|
||||
#[derive(Debug, Clone)]
|
||||
struct TransactionError;
|
||||
|
||||
impl fmt::Display for TransactionError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "invalid transaction insert result")
|
||||
}
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
pub async fn create_transaction(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<SubmitTransactionRequest>,
|
||||
) -> Result<String, (StatusCode, String)> {
|
||||
// ) -> Result<Json<db_model::Transaction>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
|
||||
// 1. check related ids
|
||||
// 1.1 check book id
|
||||
if payload.book_id <= 0 {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid book id".to_string()));
|
||||
}
|
||||
let check_book = conn
|
||||
.interact(move |conn| {
|
||||
diesel::select(exists(
|
||||
schema::books::table
|
||||
.filter(schema::books::uid.eq(uid))
|
||||
.filter(schema::books::id.eq(payload.book_id)),
|
||||
))
|
||||
.get_result::<bool>(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
|
||||
println!("book valid: {}", check_book);
|
||||
if !check_book {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid book id".to_string()));
|
||||
}
|
||||
// 1.2 check category id
|
||||
if payload.category_id <= 0 {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid category id".to_string()));
|
||||
}
|
||||
let check_category = conn
|
||||
.interact(move |conn| {
|
||||
diesel::select(exists(
|
||||
schema::categories::table
|
||||
.filter(schema::categories::uid.eq(uid))
|
||||
.filter(schema::categories::id.eq(payload.category_id)),
|
||||
))
|
||||
.get_result::<bool>(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
|
||||
println!("category valid: {}", check_category);
|
||||
if !check_category {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid category id".to_string()));
|
||||
}
|
||||
// 1.3 check tag ids
|
||||
let payload_tag_size = payload.tag_ids.len() as i64;
|
||||
let mut check_tag = payload_tag_size == 0;
|
||||
if !check_tag {
|
||||
let check_tag_count = conn
|
||||
.interact(move |conn| {
|
||||
schema::tags::table
|
||||
.filter(schema::tags::uid.eq(uid))
|
||||
.filter(schema::tags::id.eq_any(payload.tag_ids))
|
||||
.select(diesel::dsl::count(schema::tags::id))
|
||||
.first(conn)
|
||||
.map(|x: i64| x as i64)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
println!("check tag: {}", check_tag_count);
|
||||
check_tag = check_tag_count == payload_tag_size;
|
||||
}
|
||||
|
||||
println!("tag valid: {}", check_tag);
|
||||
|
||||
if !check_tag {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid tag ids".to_string()));
|
||||
}
|
||||
|
||||
// 1.4 check account
|
||||
let mut check_amount = true;
|
||||
let mut amounts: Vec<db_model::AmountForm> = Vec::new();
|
||||
for amount_req in payload.amounts {
|
||||
// Parse and check payment
|
||||
let parse_payment_result =
|
||||
util::math::parse_payment_to_value_expo(amount_req.payment.clone(), PAYMENT_STORE_EXPO);
|
||||
let value: i64;
|
||||
let expo: i64;
|
||||
match parse_payment_result {
|
||||
Ok((val, expon)) => {
|
||||
value = val;
|
||||
expo = expon;
|
||||
}
|
||||
Err(_) => {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let amount = db_model::AmountForm {
|
||||
uid: uid,
|
||||
account_id: amount_req.account_id,
|
||||
transaction_id: 0,
|
||||
value: value,
|
||||
expo: expo,
|
||||
currency: amount_req.currency.clone(),
|
||||
};
|
||||
check_amount = check_amount && true;
|
||||
amounts.push(amount);
|
||||
}
|
||||
|
||||
if !check_amount || amounts.len() == 0 {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid amount".to_string()));
|
||||
}
|
||||
|
||||
// 2. build and insert into db
|
||||
let datetime_tz = chrono::DateTime::parse_from_rfc3339(payload.time.as_str());
|
||||
let datetime = match datetime_tz {
|
||||
Ok(dt) => dt,
|
||||
Err(_) => {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid datetime, must be RFC 3339".to_string()))
|
||||
}
|
||||
};
|
||||
let datetime_utc = datetime.with_timezone(&Utc);
|
||||
|
||||
let mut transaction_resp: CreateTransactionResponse;
|
||||
let mut amount_ids: Vec<i64> = Vec::new();
|
||||
|
||||
let transaction = conn
|
||||
.interact(move |conn| {
|
||||
conn.transaction(|conn| {
|
||||
let new_transaction = db_model::TransactionForm {
|
||||
id: None,
|
||||
uid: uid,
|
||||
book_id: payload.book_id,
|
||||
description: payload.description,
|
||||
category_id: payload.category_id,
|
||||
// time: payload
|
||||
time: datetime_utc,
|
||||
};
|
||||
let inserted_transactions = diesel::insert_into(schema::transactions::table)
|
||||
.values(&new_transaction)
|
||||
.returning(db_model::Transaction::as_returning())
|
||||
.get_results(conn);
|
||||
|
||||
let mut new_tr_vec: Vec<db_model::Transaction>;
|
||||
match inserted_transactions {
|
||||
Ok(tr) => new_tr_vec = tr,
|
||||
Err(e) => {
|
||||
return diesel::result::QueryResult::Err(e);
|
||||
}
|
||||
}
|
||||
let mut new_tid = 0 as i64;
|
||||
let new_tr = new_tr_vec.get(0);
|
||||
match new_tr {
|
||||
Some(tr) =>new_tid = tr.id,
|
||||
None => new_tid = 0,
|
||||
}
|
||||
if new_tid <= 0 {
|
||||
return diesel::result::QueryResult::Err(diesel::result::Error::NotFound);
|
||||
}
|
||||
for amount in amounts.iter_mut() {
|
||||
amount.transaction_id = new_tid;
|
||||
}
|
||||
let inserted_amounts = diesel::insert_into(schema::amounts::table)
|
||||
.values(&amounts)
|
||||
.returning(db_model::Amount::as_returning())
|
||||
.get_results(conn);
|
||||
let new_amounts: Vec<db_model::Amount> = match inserted_amounts {
|
||||
Ok(ams) => ams,
|
||||
Err(_) => Vec::new(),
|
||||
};
|
||||
for am in new_amounts {
|
||||
amount_ids.push(am.id)
|
||||
};
|
||||
diesel::result::QueryResult::Ok(())
|
||||
})
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
|
||||
// 3. build response data.
|
||||
|
||||
// Ok(Json(res))
|
||||
Ok("finish".to_string())
|
||||
}
|
||||
|
||||
pub async fn update_transaction(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<SubmitTransactionRequest>,
|
||||
) -> Result<Json<CommonResp>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let now = Utc::now().naive_utc();
|
||||
let datetime_tz = chrono::DateTime::parse_from_rfc3339(payload.time.as_str());
|
||||
let datetime = match datetime_tz {
|
||||
Ok(dt) => dt,
|
||||
Err(_) => {
|
||||
return Err((StatusCode::BAD_REQUEST, "invalid datetime, must be RFC 3339".to_string()))
|
||||
}
|
||||
};
|
||||
let datetime_utc = datetime.with_timezone(&Utc);
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
diesel::update(schema::transactions::table)
|
||||
.filter(schema::transactions::id.eq(id))
|
||||
.filter(schema::transactions::uid.eq(uid))
|
||||
.set((
|
||||
schema::transactions::category_id.eq(payload.category_id),
|
||||
schema::transactions::description.eq(payload.description),
|
||||
schema::transactions::time.eq(datetime_utc),
|
||||
schema::transactions::update_at.eq(now),
|
||||
))
|
||||
.execute(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
let resp = util::req::CommonResp { code: 0 };
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
pub async fn get_transaction(
|
||||
Path(id): Path<i64>,
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<db_model::Transaction>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::transactions::table
|
||||
.filter(schema::transactions::id.eq(id))
|
||||
.filter(schema::transactions::uid.eq(uid))
|
||||
.select(db_model::Transaction::as_select())
|
||||
.limit(1)
|
||||
.get_result(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_transactions(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Query(queryParams): Query<req::GetTransactionsQueryParams>,
|
||||
) -> Result<Json<Vec<db_model::Transaction>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let offset = match queryParams.start {
|
||||
None => {0}
|
||||
Some(start) => if start > 0 {start-1} else {0}
|
||||
};
|
||||
let limit = match queryParams.limit {
|
||||
None => {1 as i32}
|
||||
Some(limit_num) => {
|
||||
if(limit_num > req::MAX_QUERY_LIMIT) {
|
||||
req::MAX_QUERY_LIMIT
|
||||
} else if(limit_num < 1) {
|
||||
1 as i32
|
||||
} else {
|
||||
limit_num
|
||||
}
|
||||
}
|
||||
};
|
||||
let res = conn
|
||||
.interact(move |conn| {
|
||||
schema::transactions::table.filter(schema::transactions::uid.eq(uid))
|
||||
.offset(offset)
|
||||
.limit(limit as i64)
|
||||
.select(db_model::Transaction::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn batch_get_transactions(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BatchGetTransactionRequest>,
|
||||
) -> Result<Json<Vec<db_model::Transaction>>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
if payload.transaction_ids.len() == 0 {
|
||||
return Err((StatusCode::BAD_REQUEST, "no transaction_id list".to_string()));
|
||||
}
|
||||
let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
|
||||
let res = conn.interact(move |conn| {
|
||||
schema::transactions::table
|
||||
.filter(schema::transactions::uid.eq(uid))
|
||||
.filter(schema::transactions::is_delete.eq(false))
|
||||
.filter(schema::transactions::id.eq_any(payload.transaction_ids))
|
||||
.select(db_model::Transaction::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await.map_err(util::req::internal_error)?.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_amounts_by_tid(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Query(params): Query<GetAmountParams>,
|
||||
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
|
||||
info!(params.transaction_id);
|
||||
let tid = match params.transaction_id {
|
||||
None => 0,
|
||||
Some(idx) => idx,
|
||||
};
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let conn = app_state
|
||||
.db
|
||||
.get()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
let res = conn.interact(move |conn| {
|
||||
schema::amounts::table
|
||||
.filter(schema::amounts::uid.eq(uid))
|
||||
.filter(schema::amounts::transaction_id.eq(tid))
|
||||
.select(db_model::Amount::as_select())
|
||||
.load(conn)
|
||||
}).await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn batch_get_amounts_by_tid(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BatchGetTransactionAmountRequest>,
|
||||
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
if payload.transaction_ids.len() == 0 {
|
||||
return Err((StatusCode::BAD_REQUEST, "no transaction_id list".to_string()));
|
||||
}
|
||||
let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
|
||||
let res = conn.interact(move |conn| {
|
||||
schema::amounts::table
|
||||
.filter(schema::amounts::uid.eq(uid))
|
||||
.filter(schema::amounts::is_delete.eq(false))
|
||||
.filter(schema::amounts::transaction_id.eq_any(payload.transaction_ids))
|
||||
.select(db_model::Amount::as_select())
|
||||
.load(conn)
|
||||
})
|
||||
.await.map_err(util::req::internal_error)?.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
|
||||
pub async fn get_all_amounts_by_tid_range(
|
||||
State(app_state): State<crate::AppState>,
|
||||
claims: Claims,
|
||||
Query(params): Query<GetAmountByTransactionRangeParams>,
|
||||
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let tid_from = match params.transaction_id_from {
|
||||
None => {-1}
|
||||
Some(id) => {id}
|
||||
};
|
||||
let tid_to = match params.transaction_id_to {
|
||||
None => {-1}
|
||||
Some(id) => {id}
|
||||
};
|
||||
if uid <= 0 || tid_from <= 0 || tid_to <= 0 || tid_from > tid_to {
|
||||
return Err((StatusCode::BAD_REQUEST,"invalid values".to_string()));
|
||||
}
|
||||
let limit: i64 = match params.limit {
|
||||
None => {MAX_QUERY_LIMIT as i64}
|
||||
Some(i) => {
|
||||
if i <= 0 {
|
||||
MAX_QUERY_LIMIT as i64
|
||||
} else {
|
||||
i as i64
|
||||
}
|
||||
}
|
||||
};
|
||||
let conn = app_state.db.get()
|
||||
.await.map_err(util::req::internal_error)?;
|
||||
let res = conn.interact(move |conn| {
|
||||
schema::amounts::table
|
||||
.filter(schema::amounts::uid.eq(uid))
|
||||
.filter(schema::amounts::is_delete.eq(false))
|
||||
.filter(schema::amounts::transaction_id.ge(tid_from))
|
||||
.filter(schema::amounts::transaction_id.le(tid_to))
|
||||
.limit(limit)
|
||||
.select(db_model::Amount::as_select())
|
||||
.load(conn)
|
||||
}).await
|
||||
.map_err(util::req::internal_error)?
|
||||
.map_err(util::req::internal_error)?;
|
||||
Ok(Json(res))
|
||||
}
|
||||
192
src/main.rs
192
src/main.rs
@@ -1,87 +1,108 @@
|
||||
use std::env;
|
||||
use axum::{
|
||||
// http::StatusCode,
|
||||
// routing::{get, post},
|
||||
// Json,
|
||||
Router,
|
||||
};
|
||||
use axum::http::Method;
|
||||
// use pbkdf2::password_hash::Error;
|
||||
// use serde::{Deserialize, Serialize};
|
||||
use crate::middleware::auth;
|
||||
use axum::{http::Method, Router};
|
||||
use clap::Parser;
|
||||
use sea_orm::{Database, DatabaseConnection, Iden};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tower::ServiceBuilder;
|
||||
use tower_http::cors::{Any, CorsLayer};
|
||||
use tower_http::trace::TraceLayer;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||
use crate::util::pass::get_pbkdf2_from_psw;
|
||||
|
||||
// Project modules
|
||||
mod ledger;
|
||||
mod api;
|
||||
mod middleware;
|
||||
mod model;
|
||||
mod util;
|
||||
mod user;
|
||||
|
||||
// Passed App State
|
||||
#[derive(Clone)]
|
||||
pub struct AppState {
|
||||
db: deadpool_diesel::postgres::Pool,
|
||||
}
|
||||
mod query;
|
||||
mod dao;
|
||||
mod dal;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
dotenvy::dotenv().unwrap();
|
||||
// dotenvy::dotenv().unwrap();
|
||||
// initialize tracing
|
||||
tracing_subscriber::registry()
|
||||
.with(tracing_subscriber::fmt::layer())
|
||||
.init();
|
||||
|
||||
let args: Vec<String> = env::args().collect();
|
||||
|
||||
if args.len() <= 1 {
|
||||
return;
|
||||
}
|
||||
// initialize db connection
|
||||
let db_url = std::env::var("DATABASE_URL").unwrap();
|
||||
|
||||
let manager = deadpool_diesel::postgres::Manager::new(db_url, deadpool_diesel::Runtime::Tokio1);
|
||||
let pool = deadpool_diesel::postgres::Pool::builder(manager)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let shared_state = AppState { db: pool };
|
||||
let cmd = args[1].clone();
|
||||
|
||||
match cmd.as_str() {
|
||||
"add_user" => {
|
||||
println!("adding user");
|
||||
if args.len() <= 4 {
|
||||
println!("insufficient arg number");
|
||||
return;
|
||||
}
|
||||
let user = args[2].clone();
|
||||
let psw = args[3].clone();
|
||||
let mail = args[4].clone();
|
||||
println!("adding user {}", user);
|
||||
let hashed = get_pbkdf2_from_psw(psw);
|
||||
let mut hash_psw = "".to_string();
|
||||
match hashed {
|
||||
Ok(val) => {
|
||||
println!("get hash {}", val);
|
||||
hash_psw=val;
|
||||
}
|
||||
Err(_) => {}
|
||||
}
|
||||
let res = user::dal::add_user(shared_state, user, hash_psw, mail)
|
||||
.await;
|
||||
return;
|
||||
}
|
||||
_ => {
|
||||
println!("unknown command {}", cmd);
|
||||
let cli = Cli::parse();
|
||||
match cli.command {
|
||||
Command::Serve { config_path } => {
|
||||
if let Ok(config) = load_config(&config_path).await {
|
||||
println!("Loaded config.");
|
||||
println!("{},{}", config.service.host.clone(), config.service.port);
|
||||
// Proceed with server initialization using `config`
|
||||
start_server(&config).await;
|
||||
} else {
|
||||
eprintln!("Failed to load config from {}", config_path);
|
||||
}
|
||||
}
|
||||
Command::PrintExampleConfig {} => {
|
||||
print_default_config().await;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
conn: DatabaseConnection,
|
||||
}
|
||||
|
||||
// Register routers
|
||||
#[derive(Deserialize,Serialize)]
|
||||
struct Key {
|
||||
jwt: String,
|
||||
user: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize,Serialize)]
|
||||
struct DatabaseConf {
|
||||
connection: String,
|
||||
}
|
||||
#[derive(Deserialize,Serialize)]
|
||||
struct ServiceConf {
|
||||
host: String,
|
||||
port: u32,
|
||||
}
|
||||
|
||||
#[derive(Deserialize,Serialize)]
|
||||
struct Config {
|
||||
service: ServiceConf,
|
||||
database: DatabaseConf,
|
||||
keys: Key,
|
||||
}
|
||||
|
||||
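For reference, a minimal sketch of the TOML shape these structs deserialize (field names come from the structs above, values mirror print_default_config below; parsed with toml::from_str purely for illustration):

    let example = r#"
        [service]
        host = "localhost"
        port = 8080

        [database]
        connection = "postgres://postgres:postgres@localhost/test_db"

        [keys]
        jwt = "THIS_IS_TEST_CONFIG"
        user = "test_user"
    "#;
    let parsed: Config = toml::from_str(example).expect("example config should parse");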
#[derive(clap::Parser)]
|
||||
struct Cli {
|
||||
#[command(subcommand)]
|
||||
command: Command,
|
||||
}
|
||||
|
||||
#[derive(clap::Subcommand)]
|
||||
enum Command {
|
||||
Serve {
|
||||
#[arg(long = "conf")]
|
||||
config_path: String,
|
||||
},
|
||||
PrintExampleConfig {},
|
||||
}
|
||||
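With these clap definitions the binary is invoked roughly as follows (binary name elided; clap derives kebab-case subcommand names):

    // <binary> serve --conf conf.toml
    // <binary> print-example-config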
async fn load_config(path: &str) -> Result<Config, Box<dyn std::error::Error>> {
|
||||
let content = tokio::fs::read_to_string(path).await?;
|
||||
let config: Config = toml::from_str(&content)?;
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
// ====== Commands ======
|
||||
|
||||
// start http server
|
||||
async fn start_server(config: &Config) {
|
||||
let conn = Database::connect(&config.database.connection)
|
||||
.await
|
||||
.expect("Database connection failed.");
|
||||
|
||||
auth::initialize_jwt_key(config.keys.jwt.clone());
|
||||
|
||||
let state = AppState { conn };
|
||||
// Build router
|
||||
let cors_layer = CorsLayer::new()
|
||||
.allow_methods([Method::GET, Method::POST])
|
||||
.allow_origin(Any);
|
||||
@@ -90,17 +111,36 @@ async fn main() {
|
||||
.layer(cors_layer);
|
||||
|
||||
let app = Router::new()
|
||||
// V1 apis
|
||||
.nest("/api/v1/category", ledger::category::get_nest_handlers())
|
||||
.nest("/api/v1/tag", ledger::tag::get_nest_handlers())
|
||||
.nest("/api/v1/book", ledger::book::get_nest_handlers())
|
||||
.nest("/api/v1/account", ledger::account::get_nest_handlers())
|
||||
.nest("/api/v1/transaction", ledger::transaction::get_nest_handlers())
|
||||
.nest("/api/v1/user", user::handler::get_nest_handlers())
|
||||
.with_state(shared_state)
|
||||
.nest("/api/v1/book", api::book::get_nest_handlers())
|
||||
.nest("/api/v1/category", api::category::get_nested_handlers())
|
||||
.nest("/api/v1/tag", api::tag::get_nest_handlers())
|
||||
.nest("/api/v1/transaction", api::transaction::get_nest_handlers())
|
||||
.with_state(state)
|
||||
.layer(global_layer);
|
||||
|
||||
let listener = tokio::net::TcpListener::bind("0.0.0.0:8987").await.unwrap();
|
||||
info!("starting server on 0.0.0.0:8987");
|
||||
axum::serve(listener, app).await.unwrap();
|
||||
let host = config.service.host.clone();
|
||||
let port = config.service.port;
|
||||
let server_url = format!("{host}:{port}");
|
||||
let listener = tokio::net::TcpListener::bind(&server_url).await.unwrap();
|
||||
axum::serve(listener, app)
|
||||
.await
|
||||
.expect("Service panic happened");
|
||||
}
|
||||
|
||||
async fn print_default_config() {
|
||||
let example_conf = Config{
|
||||
service: ServiceConf {
|
||||
host: "localhost".to_string(),
|
||||
port: 8080,
|
||||
},
|
||||
database: DatabaseConf {
|
||||
connection: "postgres://postgres:postgres@localhost/test_db".to_string(),
|
||||
},
|
||||
keys: Key {
|
||||
jwt: "THIS_IS_TEST_CONFIG".to_string(),
|
||||
user: "test_user".to_string(),
|
||||
},
|
||||
};
|
||||
    // Serialize the example config to a TOML string.
|
||||
let toml_string = toml::to_string(&example_conf);
|
||||
println!("#This is an example config.\n{}", toml_string.unwrap());
|
||||
}
|
||||
@@ -1,23 +1,18 @@
|
||||
use axum::{
|
||||
async_trait,
|
||||
extract::FromRequestParts,
|
||||
http::{
|
||||
request::Parts,
|
||||
StatusCode,
|
||||
},
|
||||
Json, RequestPartsExt,
|
||||
http::{request::Parts, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
Json, RequestPartsExt,
|
||||
};
|
||||
use axum_extra::{
|
||||
headers::{authorization::Bearer, Authorization},
|
||||
TypedHeader,
|
||||
};
|
||||
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use jsonwebtoken::{decode,encode, DecodingKey, EncodingKey, Header, Validation};
|
||||
use std::fmt::Display;
|
||||
use once_cell::sync::Lazy;
|
||||
use crate::util;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Claims {
|
||||
@@ -47,10 +42,15 @@ pub enum AuthError {
|
||||
InvalidToken,
|
||||
}
|
||||
|
||||
static KEYS: Lazy<Keys> = Lazy::new(|| {
|
||||
let secret = std::env::var("JWT_SECRET").expect("JWT_SECRET must be set");
|
||||
Keys::new(secret.as_bytes())
|
||||
});
|
||||
static KEYS: OnceLock<Keys> = OnceLock::new();
|
||||
|
||||
pub fn initialize_jwt_key(key_str: String) {
|
||||
let res = KEYS.set(Keys::new(key_str.as_bytes()));
|
||||
match res {
|
||||
Ok(_) => {}
|
||||
Err(_) => panic!("jwt key initialize failed"),
|
||||
}
|
||||
}
|
||||
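A brief sketch of the intended call order (grounded in start_server above, which passes config.keys.jwt): the OnceLock must be filled exactly once before any Claims extraction runs, and a second call panics because OnceLock::set returns Err once the key is already set.

    // Done once at startup, before axum::serve:
    // auth::initialize_jwt_key(config.keys.jwt.clone());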
|
||||
struct Keys {
|
||||
encoding: EncodingKey,
|
||||
@@ -81,27 +81,29 @@ impl AuthBody {
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<S> FromRequestParts<S> for Claims
|
||||
where
|
||||
S: Send + Sync,
|
||||
{
|
||||
type Rejection = (StatusCode, String);
|
||||
type Rejection = AuthError;
|
||||
|
||||
async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
|
||||
// Extract the token from the authorization header
|
||||
let TypedHeader(Authorization(bearer)) = parts
|
||||
.extract::<TypedHeader<Authorization<Bearer>>>()
|
||||
.await
|
||||
.map_err(util::req::internal_error)?;
|
||||
.map_err(|_| AuthError::InvalidToken)?;
|
||||
// Decode the user data
|
||||
let token_data = decode::<Claims>(bearer.token(), &KEYS.decoding, &Validation::default())
|
||||
.map_err(util::req::internal_error)?;
|
||||
let token_data = decode::<Claims>(
|
||||
bearer.token(),
|
||||
&KEYS.get().unwrap().decoding,
|
||||
&Validation::default(),
|
||||
)
|
||||
.map_err(|_| AuthError::InvalidToken)?;
|
||||
|
||||
Ok(token_data.claims)
|
||||
}
|
||||
}
|
||||
|
||||
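Because Claims implements FromRequestParts, any handler can require authentication simply by taking it as an argument, as the ledger handlers above already do; a minimal hypothetical sketch:

    // async fn whoami(claims: Claims) -> String {
    //     format!("uid: {}", claims.uid)
    // }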
impl IntoResponse for AuthError {
|
||||
fn into_response(self) -> Response {
|
||||
let (status, error_message) = match self {
|
||||
|
||||
22
src/model/db/account.rs
Normal file
22
src/model/db/account.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "account")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub name: String,
|
||||
pub account_type: i32,
|
||||
pub uid: i64,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
21
src/model/db/book.rs
Normal file
21
src/model/db/book.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "book")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub name: String,
|
||||
pub uid: i64,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
22
src/model/db/category.rs
Normal file
22
src/model/db/category.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "category")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub name: String,
|
||||
pub uid: i64,
|
||||
pub parent_id: i64,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
10
src/model/db/mod.rs
Normal file
10
src/model/db/mod.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
pub mod prelude;
|
||||
|
||||
pub mod account;
|
||||
pub mod book;
|
||||
pub mod category;
|
||||
pub mod tag;
|
||||
pub mod transaction;
|
||||
pub mod transaction_tag_rel;
|
||||
8
src/model/db/prelude.rs
Normal file
8
src/model/db/prelude.rs
Normal file
@@ -0,0 +1,8 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
pub use super::account::Entity as Account;
|
||||
pub use super::book::Entity as Book;
|
||||
pub use super::category::Entity as Category;
|
||||
pub use super::tag::Entity as Tag;
|
||||
pub use super::transaction::Entity as Transaction;
|
||||
pub use super::transaction_tag_rel::Entity as TransactionTagRel;
|
||||
21
src/model/db/tag.rs
Normal file
21
src/model/db/tag.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "tag")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub name: String,
|
||||
pub uid: i64,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
25
src/model/db/transaction.rs
Normal file
25
src/model/db/transaction.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "transaction")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub uid: i64,
|
||||
pub transaction_type: i32,
|
||||
pub book_id: i64,
|
||||
pub category_id: i64,
|
||||
pub description: String,
|
||||
pub transaction_time: DateTimeWithTimeZone,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
22
src/model/db/transaction_tag_rel.rs
Normal file
22
src/model/db/transaction_tag_rel.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
|
||||
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "transaction_tag_rel")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key)]
|
||||
pub id: i64,
|
||||
pub uid: i64,
|
||||
pub transaction_id: i64,
|
||||
pub tag_id: i64,
|
||||
pub is_deleted: bool,
|
||||
pub created_at: DateTime,
|
||||
pub updated_at: DateTime,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
@@ -1,198 +0,0 @@
use crate::model::schema;
use diesel::prelude::*;
use chrono::{DateTime, Utc};

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::categories)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Category {
    #[serde(with = "string")]
    id: i64,
    uid: i64,
    name: String,
    level: i32,
    parent_category_id: i64,
    book_id: i64,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::categories)]
pub struct CategoryForm {
    pub uid: i64,
    pub name: String,
    pub book_id: i64,
    pub level: i32,
    pub parent_category_id: i64,
}

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::tags)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Tag {
    id: i64,
    uid: i64,
    book_id: i64,
    name: String,
    level: i32,
    parent_tag_id: i64,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::tags)]
pub struct TagForm {
    pub uid: i64,
    pub book_id: i64,
    pub name: String,
    pub level: i32,
    pub parent_tag_id: i64,
}

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::books)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Book {
    #[serde(with = "string")]
    id: i64,
    uid: i64,
    name: String,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::books)]
pub struct BookForm {
    pub uid: i64,
    pub name: String,
}

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::accounts)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Account {
    id: i64,
    uid: i64,
    name: String,
    account_type: i64,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::accounts)]
pub struct AccountForm {
    pub uid: i64,
    pub name: String,
    pub account_type: i64,
}

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::transactions)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Transaction {
    #[serde(with = "string")]
    pub id: i64,
    uid: i64,
    #[serde(with = "string")]
    pub book_id: i64,
    pub description: String,
    #[serde(with = "string")]
    pub category_id: i64,
    pub time: chrono::DateTime<Utc>,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::transactions)]
pub struct TransactionForm {
    pub id: Option<i64>,
    pub uid: i64,
    pub book_id: i64,
    pub description: String,
    pub category_id: i64,
    pub time: chrono::DateTime<Utc>,
}

#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::amounts)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Amount {
    pub id: i64,
    uid: i64,
    account_id: i64,
    transaction_id: i64,
    value: i64,
    expo: i64,
    currency: String,
    #[serde(skip_serializing)]
    is_delete: bool,
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}

#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::amounts)]
pub struct AmountForm {
    pub uid: i64,
    pub transaction_id: i64,
    pub account_id: i64,
    pub value: i64,
    pub expo: i64,
    pub currency: String,
}

#[derive(Queryable, Selectable, serde::Serialize)]
#[diesel(table_name = schema::users)]
pub struct User {
    pub id: i64,
    pub username: String,
    pub password: String,
    pub mail: String,
    #[serde(skip_serializing)]
    pub is_delete: bool,
}

#[derive(Insertable)]
#[diesel(table_name = schema::users)]
pub struct UserForm {
    pub username: String,
    pub password: String,
    pub mail: String,
}

mod string {
    use std::fmt::Display;
    use std::str::FromStr;

    use serde::{de, Serializer, Deserialize, Deserializer};

    pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
    where
        T: Display,
        S: Serializer,
    {
        serializer.collect_str(value)
    }

    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where
        T: FromStr,
        T::Err: Display,
        D: Deserializer<'de>,
    {
        String::deserialize(deserializer)?.parse().map_err(de::Error::custom)
    }
}
14
src/model/http_body/account.rs
Normal file
@@ -0,0 +1,14 @@
use serde::{Serialize, Deserialize};

#[derive(Serialize)]
pub struct AccountResp {
    pub id: String,
    pub name: String,
    pub account_type: String,
}

#[derive(Deserialize, Serialize, Debug)]
pub struct AccountReq {
    pub name: Option<String>,
    pub account_type: Option<String>,
}
14
src/model/http_body/book.rs
Normal file
@@ -0,0 +1,14 @@
use serde::{Serialize, Deserialize};
use super::common::{number_stringify, OptionalI64};

#[derive(Serialize)]
pub struct BookResp {
    #[serde(with="number_stringify")]
    pub id: OptionalI64,
    pub name: String,
}

#[derive(Serialize,Deserialize)]
pub struct BookInfo {
    pub name: String,
}
17
src/model/http_body/category.rs
Normal file
@@ -0,0 +1,17 @@
use serde::{Deserialize, Serialize};
use super::common::{number_stringify, OptionalI64};
#[derive(Serialize, Deserialize)]
pub struct CategoryResp {
    pub id: i64,
    pub name: String,
    pub parent_id: i64,
}

#[derive(Serialize, Deserialize)]
pub struct CategoryInfo {
    #[serde(with="number_stringify")]
    pub id: OptionalI64,
    pub name: String,
    #[serde(with="number_stringify")]
    pub parent_id: OptionalI64,
}
88
src/model/http_body/common.rs
Normal file
@@ -0,0 +1,88 @@
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::num::ParseIntError;
use std::str::FromStr;

#[derive(Serialize)]
pub struct SimpleResponse {
    pub code: i64,
    pub message: String,
}

#[derive(Debug)]
pub struct OptionalI64(pub Option<i64>);

impl OptionalI64 {
    // Constructor: build Some from an i64
    pub fn new(value: i64) -> Self {
        OptionalI64(Some(value))
    }

    // Constructor: build None directly
    pub fn none() -> Self {
        OptionalI64(None)
    }

    // Convert from an Option<i64>
    pub fn from_option(value: Option<i64>) -> Self {
        OptionalI64(value)
    }
}

impl From<i64> for OptionalI64 {
    fn from(value: i64) -> Self {
        OptionalI64(Some(value))
    }
}
impl From<Option<i64>> for OptionalI64 {
    fn from(value: Option<i64>) -> Self {
        OptionalI64(value)
    }
}

impl FromStr for OptionalI64 {
    type Err = std::num::ParseIntError;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        if s.is_empty() || s.eq_ignore_ascii_case("null") {
            Ok(OptionalI64(None))
        } else {
            s.parse::<i64>().map(|n| OptionalI64(Some(n)))
        }
    }
}

impl Display for OptionalI64 {
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        match self.0 {
            Some(num) => write!(f, "{}", num), // print the number when a value is present
            None => write!(f, ""),             // print an empty string for None
        }
    }
}

pub mod number_stringify {
    use std::fmt::Display;
    use std::str::FromStr;

    use serde::{de, Deserialize, Deserializer, Serializer};

    pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
    where
        T: Display,
        S: Serializer,
    {
        serializer.collect_str(value)
    }

    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where
        T: FromStr,
        T::Err: Display,
        D: Deserializer<'de>,
    {
        String::deserialize(deserializer)?
            .parse()
            .map_err(de::Error::custom)
    }
}
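As a quick illustration of how number_stringify and OptionalI64 combine (a sketch under the assumption that the BookResp type from the earlier file is in scope; not code from the commits): serializing a response routes the id through its Display impl, so it comes out as a JSON string.

use crate::model::http_body::book::BookResp;
use crate::model::http_body::common::OptionalI64;

fn demo() -> serde_json::Result<()> {
    // `#[serde(with = "number_stringify")]` serializes via Display, so 42 becomes "42".
    let resp = BookResp {
        id: OptionalI64::new(42),
        name: "groceries".to_string(),
    };
    assert_eq!(serde_json::to_string(&resp)?, r#"{"id":"42","name":"groceries"}"#);
    Ok(())
}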
@@ -1,5 +1,6 @@
pub mod book;
pub mod common;
pub mod category;
pub mod tag;
pub mod book;
pub mod account;
pub mod transaction;
pub mod account;
14
src/model/http_body/tag.rs
Normal file
@@ -0,0 +1,14 @@
use serde::{Serialize, Deserialize};
use super::common::{number_stringify, OptionalI64};

#[derive(Serialize)]
pub struct TagResp {
    #[serde(with="number_stringify")]
    pub id: OptionalI64,
    pub name: String,
}

#[derive(Serialize,Deserialize)]
pub struct TagInfo {
    pub name: String,
}
25
src/model/http_body/transaction.rs
Normal file
@@ -0,0 +1,25 @@
use serde::{Serialize, Deserialize};
use crate::model::http_body::tag::TagInfo;
use crate::model::http_body::category::CategoryResp;
use crate::model::http_body::common::OptionalI64;

#[derive(Serialize,Deserialize)]
pub struct TransactionResp {
    pub id: i64,
    pub description: String,
    pub category: CategoryResp,
    pub tags: Vec<TagInfo>,
}

#[derive(Serialize,Deserialize)]
pub struct TransactionReq {
    pub id: Option<String>,
    pub description: Option<String>,
    pub category_id: Option<String>,
    pub tags: Vec<String>,
}

pub struct TransactionAmountReq {
    pub id: Option<String>,
    pub account_id: Option<String>,
}
@@ -1,3 +1,2 @@
pub mod db_model;
pub mod schema;
pub mod req;
pub mod db;
pub mod http_body;
@@ -1,42 +0,0 @@
use std::fmt;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer};

pub const QUERY_ORDER_INCREASE: i32 = 0;
pub const QUERY_ORDER_INVERT: i32 = 1;

pub const MAX_QUERY_LIMIT: i32 = 1000;

#[derive(Debug, Deserialize)]
pub struct GetAmountParams {
    #[serde(default, deserialize_with="empty_string_as_none")]
    pub transaction_id: Option<i64>,
}

#[derive(Debug, Deserialize)]
pub struct GetAmountByTransactionRangeParams {
    pub transaction_id_from: Option<i64>,
    pub transaction_id_to: Option<i64>,
    pub limit: Option<i64>,
}

// Serde deserialization decorator to map empty Strings to None,
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
    D: Deserializer<'de>,
    T: FromStr,
    T::Err: fmt::Display,
{
    let opt = Option::<String>::deserialize(de)?;
    match opt.as_deref() {
        None | Some("") => Ok(None),
        Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
    }
}

#[derive(Deserialize)]
pub struct GetTransactionsQueryParams {
    pub start: Option<i64>,
    pub limit: Option<i32>,
}
@@ -1,116 +0,0 @@
// @generated automatically by Diesel CLI.

diesel::table! {
    accounts (id) {
        id -> Int8,
        uid -> Int8,
        name -> Text,
        account_type -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    amounts (id) {
        id -> Int8,
        uid -> Int8,
        account_id -> Int8,
        transaction_id -> Int8,
        value -> Int8,
        expo -> Int8,
        currency -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    books (id) {
        id -> Int8,
        uid -> Int8,
        name -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    categories (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        name -> Text,
        level -> Int4,
        parent_category_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    tags (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        name -> Text,
        level -> Int4,
        parent_tag_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    transaction_tag_rels (id) {
        id -> Int8,
        uid -> Int8,
        transaction_id -> Int8,
        tag_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    transactions (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        description -> Text,
        category_id -> Int8,
        is_delete -> Bool,
        time -> Timestamptz,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    users (id) {
        id -> Int8,
        username -> Text,
        password -> Text,
        mail -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::allow_tables_to_appear_in_same_query!(
    accounts,
    amounts,
    books,
    categories,
    tags,
    transaction_tag_rels,
    transactions,
    users,
);
4
src/query/book.rs
Normal file
@@ -0,0 +1,4 @@
// use crate::model::db::prelude::Book;
// pub fn get_book_by_id(id:i64, uid:i64)->Option<Book> {
//
// }
1
src/query/mod.rs
Normal file
@@ -0,0 +1 @@
mod book;
116
src/schema.rs
@@ -1,116 +0,0 @@
// @generated automatically by Diesel CLI.

diesel::table! {
    accounts (id) {
        id -> Int8,
        uid -> Int8,
        name -> Text,
        account_type -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    amounts (id) {
        id -> Int8,
        uid -> Int8,
        account_id -> Int8,
        transaction_id -> Int8,
        value -> Int8,
        expo -> Int8,
        currency -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    books (id) {
        id -> Int8,
        uid -> Int8,
        name -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    categories (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        name -> Text,
        level -> Int4,
        parent_category_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    tags (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        name -> Text,
        level -> Int4,
        parent_tag_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    transaction_tag_rels (id) {
        id -> Int8,
        uid -> Int8,
        transaction_id -> Int8,
        tag_id -> Int8,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    transactions (id) {
        id -> Int8,
        uid -> Int8,
        book_id -> Int8,
        description -> Text,
        category_id -> Int8,
        is_delete -> Bool,
        time -> Timestamptz,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::table! {
    users (id) {
        id -> Int8,
        username -> Text,
        password -> Text,
        mail -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}

diesel::allow_tables_to_appear_in_same_query!(
    accounts,
    amounts,
    books,
    categories,
    tags,
    transaction_tag_rels,
    transactions,
    users,
);
118
src/user/dal.rs
@@ -1,118 +0,0 @@
use diesel::prelude::*;
use crate::model::{db_model, schema};
use std::error::Error;
use std::fmt::Debug;
use pbkdf2::password_hash::{PasswordHash, PasswordVerifier};
use pbkdf2::Pbkdf2;
use serde_json::json;

pub async fn add_user(app_state: crate::AppState, username: String, password: String, mail: String) -> Result<(), ()> {
    let conn = app_state
        .db
        .get()
        .await
        .map_err(|_| {
            println!("fail to get db connection");
            ()
        })?;
    let target_username = username.clone();
    // 1. check if current username exists.
    let res = conn.interact(
        move |conn| {
            schema::users::table
                .filter(schema::users::username.eq(target_username.clone()))
                .count()
                .get_result::<i64>(conn)
        })
        .await
        .map_err(|_res| {
            ()
        })?
        .map_err(|_res| {
            ()
        })?;
    println!("ret {}", res);
    if res > 0 {
        println!("user already exists.");
        return Ok(());
    }
    let new_user_form = db_model::UserForm {
        username: username.clone(),
        password: password.clone(),
        mail: mail.clone(),
    };
    // 2. adding user
    let add_res = conn.interact(
        move |conn| {
            diesel::insert_into(schema::users::table)
                .values(&new_user_form)
                .returning(db_model::User::as_returning())
                .get_result(conn)
        })
        .await
        .map_err(|_e| {
            ()
        })?
        .map_err(|_e| {
            ()
        })?;
    let out = json!(add_res);
    println!("new user {}", out.to_string());
    Ok(())
}

pub async fn check_user_psw(app_state: crate::AppState, username: String, password: String) -> bool {
    let conn_res = app_state
        .db
        .get()
        .await
        .map_err(|_| {
            println!("fail to get db connection");
            ()
        });
    let conn = match conn_res {
        Ok(res) => res,
        Err(_err) => { return false; }
    };
    // 1. get psw hash
    let query_username = username.clone();
    let user_rr = conn.interact(
        |conn| {
            schema::users::table
                .filter(schema::users::username.eq(query_username))
                .select(db_model::User::as_select())
                .get_results(conn)
        })
        .await;

    let user_res = match user_rr {
        Ok(res) => res,
        Err(_) => return false,
    };
    println!("get user_res success");
    let user = match user_res {
        Ok(u) => u,
        Err(_) => return false,
    };

    println!("get user success");

    if user.len() != 1 {
        return false;
    }
    println!("get uniq user success");
    let cur_user = user.get(0);
    let psw = match cur_user {
        Some(usr) => usr.password.clone(),
        None => "".to_string(),
    };
    println!("comparing psw, get {}, stored {}.", password.clone(), psw.clone());

    let hash_res = PasswordHash::new(psw.as_str());
    let hash = match hash_res {
        Ok(rs) => rs,
        Err(_) => return false,
    };
    let check_res = Pbkdf2.verify_password(password.as_bytes(), &hash);
    return check_res.is_ok();
}
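The removed DAL only verifies a stored PHC-format hash with Pbkdf2.verify_password; the hashing counterpart under the pinned pbkdf2 0.12 "simple" feature looks roughly like the sketch below (an assumption about how hashes would be produced, not code taken from the commits).

use pbkdf2::{
    password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
    Pbkdf2,
};

fn hash_and_verify(password: &str) -> Result<(), pbkdf2::password_hash::Error> {
    // Hash with a freshly generated random salt; the PHC string embeds salt and parameters.
    let salt = SaltString::generate(&mut OsRng);
    let phc_string = Pbkdf2.hash_password(password.as_bytes(), &salt)?.to_string();

    // Verification re-parses the PHC string, exactly as check_user_psw did.
    let parsed = PasswordHash::new(&phc_string)?;
    Pbkdf2.verify_password(password.as_bytes(), &parsed)
}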
@@ -1,29 +0,0 @@
use axum::{
    extract::State, http::StatusCode, routing::post, Json, Router
};
use axum_macros::debug_handler;
use crate::middleware::auth::Claims;
use super::dal::check_user_psw;

pub fn get_nest_handlers() -> Router<crate::AppState> {
    Router::new()
        .route("/login", post(login))
}

#[derive(serde::Deserialize)]
pub struct LoginCredentialRequest {
    pub username: String,
    pub password: String,
}

#[debug_handler]
pub async fn login(
    State(app_state): State<crate::AppState>,
    Json(payload): Json<LoginCredentialRequest>,
) -> Result<(), (StatusCode, String)> {
    let res = check_user_psw(app_state, payload.username.clone(), payload.password.clone()).await;
    if !res {
        return Err((StatusCode::UNAUTHORIZED, "invalid credentials".to_string()));
    }
    Ok(())
}
@@ -1,2 +0,0 @@
pub mod dal;
pub mod handler;
@@ -1,45 +0,0 @@
use regex::Regex;

pub fn parse_payment_to_value_expo(payment_str: String, target_expo: i64) -> Result<(i64, i64), ()> {
    // 1. check format
    let re = Regex::new(r"[1-9]{0,9}[0-9]\.[0-9]{2,6}$").unwrap();
    let res_format = re.is_match(payment_str.as_str());
    if !res_format {
        return Err(())
    }
    let mut value: i64 = 0;
    let mut expo: i64 = 0;

    let dot_index = payment_str.find('.');
    let (int_part, decimal_part) = match dot_index {
        Some(pos) => (&payment_str[..pos], &payment_str[pos+1..]),
        None => (payment_str.as_str(), ""),
    };

    let mut dec_part_padding = format!("{:0<width$}", decimal_part, width=target_expo as usize);
    if dec_part_padding.len() > target_expo as usize {
        let pd = &dec_part_padding[..target_expo as usize];
        dec_part_padding = pd.to_string();
    }
    let num_str = format!("{}{}", int_part, dec_part_padding);
    println!("parsed num string \"{}\"", num_str);
    let num = num_str.parse::<i64>().unwrap();
    let value = num;
    let expo = target_expo;

    Ok((value, expo))
}

#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse_payment(){
        let r1 = parse_payment_to_value_expo("1.345".to_string(), 6);
        assert_eq!(r1, Ok((1345000, 6)));
        let r2 = parse_payment_to_value_expo("0.01".to_string(), 6);
        assert_eq!(r2, Ok((10000, 6)));
        let r3 = parse_payment_to_value_expo("0.10000001".to_string(), 6);
        assert_eq!(r3, Err(()));
    }
}
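In other words, the removed helper encodes a decimal string as a fixed-point pair with amount = value × 10^(-expo): "1.345" at target_expo = 6 becomes 1_345_000 × 10^(-6) = 1.345, which is what the tests above assert.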
@@ -1,3 +1 @@
pub mod req;
pub mod pass;
pub mod math;
@@ -1,14 +0,0 @@
use axum::http::StatusCode;
use serde::Serialize;

#[derive(Serialize)]
pub struct CommonResp {
    pub code: i64,
}

pub fn internal_error<E>(err: E) -> (StatusCode, String)
where
    E: std::error::Error,
{
    (StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
}