Compare commits
7 Commits
dev/v1
...
6834c66974
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6834c66974 | ||
|
|
366862831c | ||
|
|
8273e610cb | ||
|
|
e6a32eab51 | ||
|
|
c3493cbe6f | ||
|
|
93b7e46655 | ||
|
|
27c94f4276 |
0
.env-template
Normal file
0
.env-template
Normal file
3
.gitignore
vendored
3
.gitignore
vendored
@@ -2,4 +2,5 @@
|
||||
.idea
|
||||
.vscode
|
||||
.DS_Store
|
||||
|
||||
.env
|
||||
conf.toml
|
||||
3770
Cargo.lock
generated
Normal file
3770
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
25
Cargo.toml
25
Cargo.toml
@@ -4,5 +4,30 @@ version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[workspace]
|
||||
members = [".", "migration"]
|
||||
|
||||
[dependencies]
|
||||
axum = { version = "0.8" }
|
||||
axum-extra = {version = "0.10", features = ["typed-header", "typed-routing"] }
|
||||
axum-macros = "0.5"
|
||||
sea-orm = { version = "1.1.12", features = [
|
||||
"sqlx-postgres",
|
||||
"runtime-tokio-rustls",
|
||||
"macros",
|
||||
"chrono",
|
||||
] }
|
||||
serde = { version = "1", features = ["derive"] }
|
||||
serde_json = "1.0.140"
|
||||
tokio = { version = "1.0", features = ["full"] }
|
||||
tracing = "0.1"
|
||||
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
|
||||
dotenvy = "0.15.7"
|
||||
toml = "0.8.22"
|
||||
clap = { version = "4.0", features = ["derive"] }
|
||||
pbkdf2 = { version = "0.12", features = ["simple"] }
|
||||
rand_core = { version = "0.9.3", features = ["std"] }
|
||||
jsonwebtoken = "9"
|
||||
once_cell = "1.21.3"
|
||||
tower-http = {version= "0.6", features = ["trace", "cors"] }
|
||||
tower = "0.5.2"
|
||||
22
migration/Cargo.toml
Normal file
22
migration/Cargo.toml
Normal file
@@ -0,0 +1,22 @@
|
||||
[package]
|
||||
name = "migration"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
|
||||
[lib]
|
||||
name = "migration"
|
||||
path = "src/lib.rs"
|
||||
|
||||
[dependencies]
|
||||
async-std = { version = "1", features = ["attributes", "tokio1"] }
|
||||
|
||||
[dependencies.sea-orm-migration]
|
||||
version = "1.1.12"
|
||||
features = [
|
||||
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
|
||||
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
|
||||
# e.g.
|
||||
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
|
||||
"sqlx-postgres", # `DATABASE_DRIVER` feature
|
||||
]
|
||||
41
migration/README.md
Normal file
41
migration/README.md
Normal file
@@ -0,0 +1,41 @@
|
||||
# Running Migrator CLI
|
||||
|
||||
- Generate a new migration file
|
||||
```sh
|
||||
cargo run -- generate MIGRATION_NAME
|
||||
```
|
||||
- Apply all pending migrations
|
||||
```sh
|
||||
cargo run
|
||||
```
|
||||
```sh
|
||||
cargo run -- up
|
||||
```
|
||||
- Apply first 10 pending migrations
|
||||
```sh
|
||||
cargo run -- up -n 10
|
||||
```
|
||||
- Rollback last applied migrations
|
||||
```sh
|
||||
cargo run -- down
|
||||
```
|
||||
- Rollback last 10 applied migrations
|
||||
```sh
|
||||
cargo run -- down -n 10
|
||||
```
|
||||
- Drop all tables from the database, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- fresh
|
||||
```
|
||||
- Rollback all applied migrations, then reapply all migrations
|
||||
```sh
|
||||
cargo run -- refresh
|
||||
```
|
||||
- Rollback all applied migrations
|
||||
```sh
|
||||
cargo run -- reset
|
||||
```
|
||||
- Check the status of all migrations
|
||||
```sh
|
||||
cargo run -- status
|
||||
```
|
||||
22
migration/src/lib.rs
Normal file
22
migration/src/lib.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
pub use sea_orm_migration::prelude::*;
|
||||
|
||||
mod m20250525_000001_create_ledger_table_category;
|
||||
mod m20250525_000002_create_ledger_table_book;
|
||||
mod m20250525_000003_create_ledger_table_tag;
|
||||
mod m20250525_000004_create_ledger_table_account;
|
||||
mod m20250525_000005_create_ledger_table_transaction;
|
||||
|
||||
pub struct Migrator;
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigratorTrait for Migrator {
|
||||
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
|
||||
vec![
|
||||
Box::new(m20250525_000001_create_ledger_table_category::Migration),
|
||||
Box::new(m20250525_000002_create_ledger_table_book::Migration),
|
||||
Box::new(m20250525_000003_create_ledger_table_tag::Migration),
|
||||
Box::new(m20250525_000004_create_ledger_table_account::Migration),
|
||||
Box::new(m20250525_000005_create_ledger_table_transaction::Migration),
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000001_create_ledger_table_category" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Category::Table)
|
||||
.col(
|
||||
ColumnDef::new(Category::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Category::Name).string().not_null())
|
||||
.col(ColumnDef::new(Category::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Category::ParentId).big_integer().default(0i64).not_null())
|
||||
.col(ColumnDef::new(Category::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Category::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Category::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Category::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Category {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
ParentId,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
63
migration/src/m20250525_000002_create_ledger_table_book.rs
Normal file
63
migration/src/m20250525_000002_create_ledger_table_book.rs
Normal file
@@ -0,0 +1,63 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000002_create_ledger_table_book" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
// Define how to apply this migration
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Book::Table)
|
||||
.col(
|
||||
ColumnDef::new(Book::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Book::Name).string().not_null())
|
||||
.col(ColumnDef::new(Book::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Book::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Book::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Book::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Book::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Book {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
62
migration/src/m20250525_000003_create_ledger_table_tag.rs
Normal file
62
migration/src/m20250525_000003_create_ledger_table_tag.rs
Normal file
@@ -0,0 +1,62 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000001_create_ledger_table_tag" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Tag::Table)
|
||||
.col(
|
||||
ColumnDef::new(Tag::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Tag::Name).string().not_null())
|
||||
.col(ColumnDef::new(Tag::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Tag::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Tag::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Tag::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Tag::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Tag {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
@@ -0,0 +1,64 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000004_create_ledger_table_account" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Account::Table)
|
||||
.col(
|
||||
ColumnDef::new(Account::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Account::Name).string().not_null())
|
||||
.col(ColumnDef::new(Account::Type).integer().not_null())
|
||||
.col(ColumnDef::new(Account::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Account::IsDeleted).boolean().default(false).not_null())
|
||||
.col(
|
||||
ColumnDef::new(Account::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Account::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Account::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Account {
|
||||
Table,
|
||||
Id,
|
||||
Name,
|
||||
Uid,
|
||||
Type,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
@@ -0,0 +1,84 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
pub struct Migration;
|
||||
|
||||
impl MigrationName for Migration {
|
||||
fn name(&self) -> &str {
|
||||
"m20250525_000005_create_ledger_table_transaction" // Make sure this matches with the file name
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait::async_trait]
|
||||
impl MigrationTrait for Migration {
|
||||
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.create_table(
|
||||
Table::create()
|
||||
.table(Transaction::Table)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::Id)
|
||||
.big_integer()
|
||||
.not_null()
|
||||
.auto_increment()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(ColumnDef::new(Transaction::Uid).big_integer().not_null())
|
||||
.col(ColumnDef::new(Transaction::Type).integer().not_null())
|
||||
.col(ColumnDef::new(Transaction::BookId).big_integer().not_null())
|
||||
.col(
|
||||
ColumnDef::new(Transaction::CategoryId)
|
||||
.big_integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(ColumnDef::new(Transaction::Description).string().not_null())
|
||||
.col(
|
||||
ColumnDef::new(Transaction::TransactionTime)
|
||||
.timestamp_with_time_zone()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::IsDeleted)
|
||||
.boolean()
|
||||
.default(false)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::CreatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(Transaction::UpdatedAt)
|
||||
.date_time()
|
||||
.default(Expr::current_timestamp())
|
||||
.not_null(),
|
||||
)
|
||||
.to_owned(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
// Define how to rollback this migration: Drop the Bakery table.
|
||||
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
|
||||
manager
|
||||
.drop_table(Table::drop().table(Transaction::Table).to_owned())
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Iden)]
|
||||
pub enum Transaction {
|
||||
Table,
|
||||
Id,
|
||||
Uid,
|
||||
Type,
|
||||
BookId,
|
||||
CategoryId,
|
||||
Description,
|
||||
TransactionTime,
|
||||
IsDeleted,
|
||||
CreatedAt,
|
||||
UpdatedAt,
|
||||
}
|
||||
6
migration/src/main.rs
Normal file
6
migration/src/main.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
use sea_orm_migration::prelude::*;
|
||||
|
||||
#[async_std::main]
|
||||
async fn main() {
|
||||
cli::run_cli(migration::Migrator).await;
|
||||
}
|
||||
171
src/api/book.rs
Normal file
171
src/api/book.rs
Normal file
@@ -0,0 +1,171 @@
|
||||
use axum::routing::{get, post};
|
||||
use axum::{
|
||||
extract::{Path, State},
|
||||
http::StatusCode,
|
||||
Json, Router,
|
||||
};
|
||||
use axum_macros::debug_handler;
|
||||
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::book::ActiveModel as BookActiveModel;
|
||||
use crate::model::db::book::Column as BookColumn;
|
||||
use crate::model::db::book::Model as BookModel;
|
||||
use crate::model::db::prelude::Book;
|
||||
use crate::model::http_body::book;
|
||||
use crate::model::http_body::book::{BookInfo, BookItem};
|
||||
use crate::model::http_body::common::SimpleResponse;
|
||||
use crate::AppState;
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use sea_orm::{ColumnTrait};
|
||||
|
||||
pub fn get_nest_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update",post(update_book_handler))
|
||||
.route("/{id}",get(get_book_by_id_handler))
|
||||
.route("/", post(create_book_handler).get(get_all_books_handler))
|
||||
}
|
||||
|
||||
// handlers
|
||||
//
|
||||
#[debug_handler]
|
||||
async fn get_all_books_handler(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<book::BookItem>>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let all_books = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let mut books: Vec<BookItem> = Vec::new();
|
||||
for b in all_books {
|
||||
let book_resp = BookItem {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
books.push(book_resp);
|
||||
}
|
||||
Ok(Json(books))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_book_by_id_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<BookItem>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
let book_query = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.filter(BookColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await
|
||||
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
|
||||
|
||||
let book_resp: BookItem;
|
||||
match book_query {
|
||||
Some(b) => {
|
||||
book_resp = BookItem {
|
||||
id: b.id.into(),
|
||||
name: b.name,
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
return Err((StatusCode::NOT_FOUND, "not_found".to_string()));
|
||||
}
|
||||
}
|
||||
Ok(Json(book_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn create_book_handler(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BookInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let book = BookActiveModel {
|
||||
name: Set(payload.name.clone().to_owned()),
|
||||
uid: Set(uid.to_owned()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let res = Book::insert(book).exec(&state.conn).await;
|
||||
let mut err_code: i64 = 0;
|
||||
let mut msg: String;
|
||||
match res {
|
||||
Ok(_) => {
|
||||
err_code = 0;
|
||||
msg = "ok".to_owned();
|
||||
}
|
||||
Err(e) => {
|
||||
err_code = 0;
|
||||
msg = e.to_string();
|
||||
}
|
||||
}
|
||||
|
||||
let resp = SimpleResponse {
|
||||
code: err_code,
|
||||
message: msg,
|
||||
};
|
||||
|
||||
Ok(Json(resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn update_book_handler(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
Json(payload): Json<BookInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid: i64 = claims.uid.clone();
|
||||
|
||||
let exist_book = Book::find()
|
||||
.filter(BookColumn::Uid.eq(uid))
|
||||
.filter(BookColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let book: BookModel;
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "ok".to_owned(),
|
||||
};
|
||||
match exist_book {
|
||||
Ok(b) => match b {
|
||||
Some(bk) => {
|
||||
book = bk;
|
||||
}
|
||||
_ => return Err((StatusCode::NOT_FOUND, "not_found".to_string())),
|
||||
},
|
||||
Err(_) => {
|
||||
resp.code = 1;
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"connection_error".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let mut book_active_model: BookActiveModel = book.into();
|
||||
book_active_model.name = Set(payload.name.clone());
|
||||
book_active_model.updated_at = Set(Local::now().naive_utc());
|
||||
let update_res = book_active_model.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
resp.message = "ok".to_owned();
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"book_update_fail".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
Ok(Json(resp))
|
||||
}
|
||||
217
src/api/category.rs
Normal file
217
src/api/category.rs
Normal file
@@ -0,0 +1,217 @@
|
||||
use crate::api::category;
|
||||
use crate::middleware::auth::Claims;
|
||||
use crate::model::db::prelude::Category;
|
||||
use crate::model::db::{
|
||||
category::ActiveModel as CategoryActiveModel, category::Column as CategoryColumn,
|
||||
category::Model as CategoryModel,
|
||||
};
|
||||
use crate::model::http_body::category::CategoryInfo;
|
||||
use crate::model::http_body::common::{OptionalI64, SimpleResponse};
|
||||
use crate::AppState;
|
||||
use axum::extract::{Path, State};
|
||||
use axum::http::StatusCode;
|
||||
use axum::routing::{get, post};
|
||||
use axum::{Json, Router};
|
||||
use axum_macros::debug_handler;
|
||||
|
||||
use sea_orm::sqlx::types::chrono::Local;
|
||||
use sea_orm::QueryFilter;
|
||||
use sea_orm::{entity::*, query::*};
|
||||
use tokio::join;
|
||||
|
||||
pub fn get_nested_handlers() -> Router<crate::AppState> {
|
||||
Router::new()
|
||||
.route("/{id}/update", post(update_category_by_id))
|
||||
.route("/{id}", get(get_category_by_id))
|
||||
.route("/", post(create_category).get(get_all_categories))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_all_categories(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<Vec<CategoryInfo>>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let categories_query = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.all(&state.conn)
|
||||
.await;
|
||||
let category_models = match categories_query {
|
||||
Ok(categories) => categories,
|
||||
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
};
|
||||
let mut category_resp: Vec<CategoryInfo> = Vec::new();
|
||||
for category in category_models {
|
||||
let category_info = CategoryInfo {
|
||||
id: category.id.into(),
|
||||
name: category.name,
|
||||
parent_id: category.parent_id.into(),
|
||||
};
|
||||
category_resp.push(category_info);
|
||||
}
|
||||
Ok(Json(category_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn create_category(
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
payload: Json<CategoryInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let parent_id: i64 = match payload.parent_id {
|
||||
OptionalI64(pid_opt) => pid_opt.unwrap_or_else(|| 0),
|
||||
};
|
||||
let category_active_model = CategoryActiveModel {
|
||||
name: Set(payload.name.clone()),
|
||||
uid: Set(uid),
|
||||
parent_id: Set(parent_id),
|
||||
..Default::default()
|
||||
};
|
||||
let insert_res = Category::insert(category_active_model)
|
||||
.exec(&state.conn)
|
||||
.await;
|
||||
match insert_res {
|
||||
Ok(_) => Ok(Json(SimpleResponse {
|
||||
code: 0,
|
||||
message: "success".to_string(),
|
||||
})),
|
||||
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
}
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn get_category_by_id(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
) -> Result<Json<CategoryInfo>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let category_query_res = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::Id.eq(id))
|
||||
.one(&state.conn)
|
||||
.await;
|
||||
let category_query: CategoryModel = match category_query_res {
|
||||
Ok(r) => match r {
|
||||
Some(res) => res,
|
||||
None => return Err((StatusCode::NOT_FOUND, "not found".to_string())),
|
||||
},
|
||||
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
|
||||
};
|
||||
let category_resp = CategoryInfo {
|
||||
id: category_query.id.into(),
|
||||
name: category_query.name.clone(),
|
||||
parent_id: category_query.parent_id.into(),
|
||||
};
|
||||
Ok(Json(category_resp))
|
||||
}
|
||||
|
||||
#[debug_handler]
|
||||
async fn update_category_by_id(
|
||||
Path(id): Path<i64>,
|
||||
state: State<AppState>,
|
||||
claims: Claims,
|
||||
payload: Json<CategoryInfo>,
|
||||
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
|
||||
let uid = claims.uid.clone();
|
||||
let mut parent_category_required = false;
|
||||
let mut parent_id: i64 = 0;
|
||||
let category_query = Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::Id.eq(id))
|
||||
.one(&state.conn);
|
||||
let parent_query = match payload.parent_id {
|
||||
OptionalI64(Some(cid)) => {
|
||||
if cid > 0 {
|
||||
parent_category_required = true;
|
||||
parent_id = cid;
|
||||
Some(
|
||||
Category::find()
|
||||
.filter(CategoryColumn::Uid.eq(uid))
|
||||
.filter(CategoryColumn::ParentId.eq(cid))
|
||||
.one(&state.conn),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
OptionalI64(None) => None,
|
||||
};
|
||||
|
||||
let (category_result, parent_result) = if let Some(parent_query) = parent_query {
|
||||
// 并发执行两个查询
|
||||
let (category, parent) = join!(category_query, parent_query);
|
||||
// 处理查询结果
|
||||
(
|
||||
category.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
parent.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
)
|
||||
} else {
|
||||
// 只查询 category
|
||||
(
|
||||
category_query.await.map_err(|e| {
|
||||
(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
format!("Database error: {}", e),
|
||||
)
|
||||
})?,
|
||||
None,
|
||||
)
|
||||
};
|
||||
|
||||
let category = match category_result {
|
||||
Some(category) => {
|
||||
category
|
||||
}
|
||||
None => {
|
||||
return Err((StatusCode::NOT_FOUND, "Category not found".to_string()));
|
||||
}
|
||||
};
|
||||
let parent_category_valid = match parent_result {
|
||||
Some(_) => { true }
|
||||
None => { false }
|
||||
};
|
||||
|
||||
let mut resp = SimpleResponse {
|
||||
code: 0,
|
||||
message: "success".to_string(),
|
||||
};
|
||||
let mut category_active_model : CategoryActiveModel = category.into();
|
||||
category_active_model.name = Set(payload.name.clone());
|
||||
category_active_model.updated_at = Set(Local::now().naive_utc());
|
||||
if parent_category_required && parent_id > 0 {
|
||||
category_active_model.parent_id = Set(parent_id.into());
|
||||
}
|
||||
|
||||
let update_res = category_active_model.update(&state.conn).await;
|
||||
match update_res {
|
||||
Ok(_) => {
|
||||
resp.code = 0;
|
||||
resp.message = "ok".to_owned();
|
||||
}
|
||||
Err(_) => {
|
||||
return Err((
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
"category update failed".to_string(),
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if parent_category_required && !parent_category_valid {
|
||||
resp.code = 1;
|
||||
resp.message = "Parent category not found".to_string();
|
||||
return Err((StatusCode::NOT_FOUND, "Parent category not found".to_string()));
|
||||
}
|
||||
Ok(Json(resp))
|
||||
}
|
||||
2
src/api/mod.rs
Normal file
2
src/api/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod book;
|
||||
pub mod category;
|
||||
120
src/main.rs
120
src/main.rs
@@ -1,3 +1,119 @@
|
||||
fn main() {
|
||||
println!("Hello, world!");
|
||||
use crate::middleware::auth;
|
||||
use axum::{http::Method, Router};
|
||||
use clap::Parser;
|
||||
use sea_orm::{Database, DatabaseConnection};
|
||||
use serde::Deserialize;
|
||||
use tower::ServiceBuilder;
|
||||
use tower_http::cors::{Any, CorsLayer};
|
||||
use tower_http::trace::TraceLayer;
|
||||
use tracing::info;
|
||||
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
// Project modules
|
||||
mod api;
|
||||
mod middleware;
|
||||
mod model;
|
||||
mod util;
|
||||
mod query;
|
||||
|
||||
#[tokio::main]
|
||||
async fn main() {
|
||||
dotenvy::dotenv().unwrap();
|
||||
// initialize tracing
|
||||
tracing_subscriber::registry()
|
||||
.with(tracing_subscriber::fmt::layer())
|
||||
.init();
|
||||
let cli = Cli::parse();
|
||||
match cli.command {
|
||||
Command::Serve { config_path } => {
|
||||
if let Ok(config) = load_config(&config_path).await {
|
||||
println!("Loaded config.");
|
||||
println!("{},{}", config.service.host.clone(), config.service.port);
|
||||
// Proceed with server initialization using `config`
|
||||
start_server(&config).await;
|
||||
} else {
|
||||
eprintln!("Failed to load config from {}", config_path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AppState {
|
||||
conn: DatabaseConnection,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Key {
|
||||
jwt: String,
|
||||
user: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct DatabaseConf {
|
||||
connection: String,
|
||||
}
|
||||
#[derive(Deserialize)]
|
||||
struct ServiceConf {
|
||||
host: String,
|
||||
port: u32,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct Config {
|
||||
service: ServiceConf,
|
||||
database: DatabaseConf,
|
||||
keys: Key,
|
||||
}
|
||||
|
||||
#[derive(clap::Parser)]
|
||||
struct Cli {
|
||||
#[command(subcommand)]
|
||||
command: Command,
|
||||
}
|
||||
|
||||
#[derive(clap::Subcommand)]
|
||||
enum Command {
|
||||
Serve {
|
||||
#[arg(long = "conf")]
|
||||
config_path: String,
|
||||
},
|
||||
}
|
||||
async fn load_config(path: &str) -> Result<Config, Box<dyn std::error::Error>> {
|
||||
let content = tokio::fs::read_to_string(path).await?;
|
||||
let config: Config = toml::from_str(&content)?;
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
// ====== Commands ======
|
||||
|
||||
// start http server
|
||||
async fn start_server(config: &Config) {
|
||||
let conn = Database::connect(&config.database.connection)
|
||||
.await
|
||||
.expect("Database connection failed.");
|
||||
|
||||
auth::initialize_jwt_key(config.keys.jwt.clone());
|
||||
|
||||
let state = AppState { conn };
|
||||
// Build router
|
||||
let cors_layer = CorsLayer::new()
|
||||
.allow_methods([Method::GET, Method::POST])
|
||||
.allow_origin(Any);
|
||||
let global_layer = ServiceBuilder::new()
|
||||
.layer(TraceLayer::new_for_http())
|
||||
.layer(cors_layer);
|
||||
|
||||
let app = Router::new()
|
||||
.nest("/api/v1/book", api::book::get_nest_handlers())
|
||||
.nest("/api/v1/category", api::category::get_nested_handlers())
|
||||
.with_state(state)
|
||||
.layer(global_layer);
|
||||
let host = config.service.host.clone();
|
||||
let port = config.service.port;
|
||||
let server_url = format!("{host}:{port}");
|
||||
let listener = tokio::net::TcpListener::bind(&server_url).await.unwrap();
|
||||
axum::serve(listener, app)
|
||||
.await
|
||||
.expect("Service panic happened");
|
||||
}
|
||||
|
||||
120
src/middleware/auth.rs
Normal file
120
src/middleware/auth.rs
Normal file
@@ -0,0 +1,120 @@
|
||||
use axum::{
|
||||
extract::FromRequestParts,
|
||||
http::{request::Parts, StatusCode},
|
||||
response::{IntoResponse, Response},
|
||||
Json, RequestPartsExt,
|
||||
};
|
||||
use axum_extra::{
|
||||
headers::{authorization::Bearer, Authorization},
|
||||
TypedHeader,
|
||||
};
|
||||
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::json;
|
||||
use std::fmt::Display;
|
||||
use std::sync::OnceLock;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Claims {
|
||||
sub: String,
|
||||
// company: String,
|
||||
exp: usize,
|
||||
pub uid: i64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct AuthBody {
|
||||
access_token: String,
|
||||
token_type: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct AuthPayload {
|
||||
client_id: String,
|
||||
client_secret: String,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum AuthError {
|
||||
WrongCredentials,
|
||||
MissingCredentials,
|
||||
TokenCreation,
|
||||
InvalidToken,
|
||||
}
|
||||
|
||||
static KEYS: OnceLock<Keys> = OnceLock::new();
|
||||
|
||||
pub fn initialize_jwt_key(key_str: String) {
|
||||
let res = KEYS.set(Keys::new(key_str.as_bytes()));
|
||||
match res {
|
||||
Ok(_) => {}
|
||||
Err(_) => panic!("jwt key initialize failed"),
|
||||
}
|
||||
}
|
||||
|
||||
struct Keys {
|
||||
encoding: EncodingKey,
|
||||
decoding: DecodingKey,
|
||||
}
|
||||
|
||||
impl Keys {
|
||||
fn new(secret: &[u8]) -> Self {
|
||||
Self {
|
||||
encoding: EncodingKey::from_secret(secret),
|
||||
decoding: DecodingKey::from_secret(secret),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Human-readable identity of the token subject, for logging/display only
// (serde handles the wire format separately).
impl Display for Claims {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `sub` is printed as an email address — TODO confirm the issuer
        // actually stores an email here.
        write!(f, "Email: {}", self.sub)
    }
}
|
||||
|
||||
impl AuthBody {
|
||||
fn new(access_token: String) -> Self {
|
||||
Self {
|
||||
access_token,
|
||||
token_type: "Bearer".to_string(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Axum extractor: any handler that takes `Claims` as a parameter implicitly
// requires a valid `Authorization: Bearer <jwt>` header; requests without one
// are rejected before the handler body runs.
impl<S> FromRequestParts<S> for Claims
where
    S: Send + Sync,
{
    type Rejection = AuthError;

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Extract the token from the authorization header; a missing or
        // malformed header is reported the same way as a bad token.
        let TypedHeader(Authorization(bearer)) = parts
            .extract::<TypedHeader<Authorization<Bearer>>>()
            .await
            .map_err(|_| AuthError::InvalidToken)?;
        // Decode the user data.
        // NOTE(review): `KEYS.get().unwrap()` panics if `initialize_jwt_key`
        // was never called — acceptable only if startup guarantees the call;
        // confirm in `main`.
        let token_data = decode::<Claims>(
            bearer.token(),
            &KEYS.get().unwrap().decoding,
            &Validation::default(),
        )
        .map_err(|_| AuthError::InvalidToken)?;

        Ok(token_data.claims)
    }
}
|
||||
impl IntoResponse for AuthError {
|
||||
fn into_response(self) -> Response {
|
||||
let (status, error_message) = match self {
|
||||
AuthError::WrongCredentials => (StatusCode::UNAUTHORIZED, "Wrong credentials"),
|
||||
AuthError::MissingCredentials => (StatusCode::BAD_REQUEST, "Missing credentials"),
|
||||
AuthError::TokenCreation => (StatusCode::INTERNAL_SERVER_ERROR, "Token creation error"),
|
||||
AuthError::InvalidToken => (StatusCode::BAD_REQUEST, "Invalid token"),
|
||||
};
|
||||
let body = Json(json!({
|
||||
"error": error_message,
|
||||
}));
|
||||
(status, body).into_response()
|
||||
}
|
||||
}
|
||||
1
src/middleware/mod.rs
Normal file
1
src/middleware/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod auth;
|
||||
22
src/model/db/account.rs
Normal file
22
src/model/db/account.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Row of the `account` table — presumably a user-owned money account;
/// rows are soft-deleted via `is_deleted`, never removed.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "account")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
    /// Account-kind discriminant — presumably maps to an app-level enum;
    /// TODO confirm the value set.
    pub r#type: i32,
    /// Owning user's id.
    pub uid: i64,
    /// Soft-delete flag.
    pub is_deleted: bool,
    pub created_at: DateTime,
    pub updated_at: DateTime,
}

/// No foreign-key relations are declared for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
21
src/model/db/book.rs
Normal file
21
src/model/db/book.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Row of the `book` table — presumably an account/ledger book owned by one
/// user; rows are soft-deleted via `is_deleted`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "book")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
    /// Owning user's id.
    pub uid: i64,
    /// Soft-delete flag.
    pub is_deleted: bool,
    pub created_at: DateTime,
    pub updated_at: DateTime,
}

/// No foreign-key relations are declared for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
22
src/model/db/category.rs
Normal file
22
src/model/db/category.rs
Normal file
@@ -0,0 +1,22 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Row of the `category` table — a per-user transaction category forming a
/// tree via `parent_id`; rows are soft-deleted via `is_deleted`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "category")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
    /// Owning user's id.
    pub uid: i64,
    /// Parent category id — sentinel for "no parent" is unknown from here
    /// (0? -1?); TODO confirm against the migration/queries.
    pub parent_id: i64,
    /// Soft-delete flag.
    pub is_deleted: bool,
    pub created_at: DateTime,
    pub updated_at: DateTime,
}

/// No foreign-key relations are declared for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
9
src/model/db/mod.rs
Normal file
9
src/model/db/mod.rs
Normal file
@@ -0,0 +1,9 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
//!
//! One module per database table, plus a `prelude` of entity re-exports.

pub mod prelude;

pub mod account;
pub mod book;
pub mod category;
pub mod tag;
pub mod transaction;
|
||||
7
src/model/db/prelude.rs
Normal file
7
src/model/db/prelude.rs
Normal file
@@ -0,0 +1,7 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
//!
//! Flat re-exports so callers can `use crate::model::db::prelude::*` and get
//! every entity under its table name.

pub use super::account::Entity as Account;
pub use super::book::Entity as Book;
pub use super::category::Entity as Category;
pub use super::tag::Entity as Tag;
pub use super::transaction::Entity as Transaction;
|
||||
21
src/model/db/tag.rs
Normal file
21
src/model/db/tag.rs
Normal file
@@ -0,0 +1,21 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Row of the `tag` table — a per-user label; rows are soft-deleted via
/// `is_deleted`.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tag")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    pub name: String,
    /// Owning user's id.
    pub uid: i64,
    /// Soft-delete flag.
    pub is_deleted: bool,
    pub created_at: DateTime,
    pub updated_at: DateTime,
}

/// No foreign-key relations are declared for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
25
src/model/db/transaction.rs
Normal file
25
src/model/db/transaction.rs
Normal file
@@ -0,0 +1,25 @@
|
||||
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11

use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};

/// Row of the `transaction` table — one bookkeeping entry tied to a book and
/// a category. NOTE(review): no amount column is visible here; presumably the
/// monetary value lives in another table or a later migration — confirm.
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "transaction")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: i64,
    /// Owning user's id.
    pub uid: i64,
    /// Transaction-kind discriminant (e.g. income/expense) — TODO confirm
    /// the value set.
    pub r#type: i32,
    pub book_id: i64,
    pub category_id: i64,
    pub description: String,
    /// When the transaction happened (timezone-aware, unlike the audit
    /// columns below).
    pub transaction_time: DateTimeWithTimeZone,
    /// Soft-delete flag.
    pub is_deleted: bool,
    pub created_at: DateTime,
    pub updated_at: DateTime,
}

/// No foreign-key relations are declared for this entity.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
|
||||
14
src/model/http_body/book.rs
Normal file
14
src/model/http_body/book.rs
Normal file
@@ -0,0 +1,14 @@
|
||||
use serde::{Serialize, Deserialize};
use super::common::{number_stringify, OptionalI64};

/// One book entry as sent to clients in list responses.
#[derive(Serialize)]
pub struct BookItem {
    // Serialized as a JSON string rather than a number so large i64 ids
    // survive JavaScript's 53-bit number precision.
    #[serde(with="number_stringify")]
    pub id: OptionalI64,
    pub name: String,
}

/// Request/response body carrying just a book's name (create/rename flows —
/// presumably; confirm against the handlers).
#[derive(Serialize,Deserialize)]
pub struct BookInfo {
    pub name: String,
}
|
||||
10
src/model/http_body/category.rs
Normal file
10
src/model/http_body/category.rs
Normal file
@@ -0,0 +1,10 @@
|
||||
use serde::{Deserialize, Serialize};
use super::common::{number_stringify, OptionalI64};

/// Category payload exchanged with clients; id fields travel as JSON strings
/// (empty/"null" meaning absent) via the `number_stringify` adapter.
#[derive(Serialize, Deserialize)]
pub struct CategoryInfo {
    #[serde(with="number_stringify")]
    pub id: OptionalI64,
    pub name: String,
    /// Parent category id; absent for top-level categories.
    #[serde(with="number_stringify")]
    pub parent_id: OptionalI64,
}
|
||||
88
src/model/http_body/common.rs
Normal file
88
src/model/http_body/common.rs
Normal file
@@ -0,0 +1,88 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::{Display, Formatter, Result as FmtResult};
|
||||
use std::num::ParseIntError;
|
||||
use std::str::FromStr;
|
||||
|
||||
/// Minimal envelope for endpoints that only need to report success/failure.
#[derive(Serialize)]
pub struct SimpleResponse {
    /// Application status code — presumably 0 means success; TODO confirm
    /// against the handlers that construct this.
    pub code: i64,
    pub message: String,
}
|
||||
|
||||
/// Newtype over `Option<i64>` giving ids a string-friendly round-trip:
/// `Display` prints `Some(n)` as the bare number and `None` as "", while
/// `FromStr` treats "" and (case-insensitive) "null" as `None`.
#[derive(Debug)]
pub struct OptionalI64(pub Option<i64>);

impl OptionalI64 {
    /// Wrap a concrete value as `Some`.
    pub fn new(value: i64) -> Self {
        Self(Some(value))
    }

    /// The absent value.
    pub fn none() -> Self {
        Self(None)
    }

    /// Adopt an existing `Option<i64>` unchanged.
    pub fn from_option(value: Option<i64>) -> Self {
        Self(value)
    }
}

impl From<i64> for OptionalI64 {
    fn from(value: i64) -> Self {
        Self(Some(value))
    }
}

impl From<Option<i64>> for OptionalI64 {
    fn from(value: Option<i64>) -> Self {
        Self(value)
    }
}

impl FromStr for OptionalI64 {
    type Err = std::num::ParseIntError;

    /// Empty strings and any casing of "null" parse to `None`; anything else
    /// must be a valid `i64` or the parse error is propagated.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "" => Ok(Self(None)),
            _ if s.eq_ignore_ascii_case("null") => Ok(Self(None)),
            _ => s.parse::<i64>().map(|n| Self(Some(n))),
        }
    }
}

impl Display for OptionalI64 {
    /// Mirror of `FromStr`: the bare number for `Some`, "" for `None`.
    fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
        match self.0 {
            Some(n) => write!(f, "{n}"),
            None => f.write_str(""),
        }
    }
}
|
||||
|
||||
/// Serde adapter for `#[serde(with = "number_stringify")]`: writes numeric
/// fields as JSON strings and parses them back, so i64 ids are not truncated
/// by JSON consumers limited to f64 precision.
pub mod number_stringify {
    use std::fmt::Display;
    use std::str::FromStr;

    use serde::{de, Deserialize, Deserializer, Serializer};

    /// Serialize any `Display`-able value as its string form.
    pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
    where
        T: Display,
        S: Serializer,
    {
        serializer.collect_str(value)
    }

    /// Deserialize a string and parse it into `T`; parse failures are
    /// surfaced as custom serde errors carrying `T::Err`'s message.
    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where
        T: FromStr,
        T::Err: Display,
        D: Deserializer<'de>,
    {
        String::deserialize(deserializer)?
            .parse()
            .map_err(de::Error::custom)
    }
}
|
||||
3
src/model/http_body/mod.rs
Normal file
3
src/model/http_body/mod.rs
Normal file
@@ -0,0 +1,3 @@
|
||||
pub mod book;
|
||||
pub mod common;
|
||||
pub mod category;
|
||||
2
src/model/mod.rs
Normal file
2
src/model/mod.rs
Normal file
@@ -0,0 +1,2 @@
|
||||
pub mod db;
|
||||
pub mod http_body;
|
||||
4
src/query/book.rs
Normal file
4
src/query/book.rs
Normal file
@@ -0,0 +1,4 @@
|
||||
// use crate::model::db::prelude::Book;
|
||||
// pub fn get_book_by_id(id:i64, uid:i64)->Option<Book> {
|
||||
//
|
||||
// }
|
||||
1
src/query/mod.rs
Normal file
1
src/query/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
mod book;
|
||||
1
src/util/mod.rs
Normal file
1
src/util/mod.rs
Normal file
@@ -0,0 +1 @@
|
||||
pub mod pass;
|
||||
16
src/util/pass.rs
Normal file
16
src/util/pass.rs
Normal file
@@ -0,0 +1,16 @@
|
||||
use std::error::Error;
|
||||
use pbkdf2::{
|
||||
password_hash::{
|
||||
rand_core::OsRng,
|
||||
PasswordHash,SaltString,
|
||||
},
|
||||
Pbkdf2,
|
||||
};
|
||||
use pbkdf2::password_hash::PasswordHasher;
|
||||
|
||||
pub fn get_pbkdf2_from_psw(password:String) -> Result<String, pbkdf2::password_hash::Error> {
|
||||
let salt = SaltString::generate(&mut OsRng);
|
||||
let password_hash = Pbkdf2.hash_password(password.as_bytes(), &salt)?.to_string();
|
||||
println!("{}",password_hash);
|
||||
return Ok(password_hash)
|
||||
}
|
||||
Reference in New Issue
Block a user