Compare commits

..

3 Commits

Author  SHA1        Message              Date
acx     952a37892d  feat: add jwt auth   2024-07-18 15:39:09 +00:00
acx     7270399f35  feat: category crud  2024-07-09 17:06:20 +00:00
acx     bddf92686c  init                 2024-07-07 15:53:31 +00:00
61 changed files with 975 additions and 4701 deletions

8
.editorconfig Normal file
View File

@@ -0,0 +1,8 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[Makefile]
indent_style = tab

3
.gitignore vendored
View File

@@ -3,6 +3,3 @@
 .vscode
 .DS_Store
 .env
-conf.toml
-config.toml
-.fleet\

2850
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -4,31 +4,22 @@ version = "0.1.0"
 edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-[workspace]
-members = [".", "migration"]
 [dependencies]
-axum = { version = "0.8" }
-axum-extra = {version = "0.10", features = ["typed-header", "typed-routing"] }
-axum-macros = "0.5"
-sea-orm = { version = "1.1.16", features = [
-    "sqlx-postgres",
-    "runtime-tokio-rustls",
-    "macros",
-    "chrono",
-] }
-serde = { version = "1", features = ["derive"] }
-serde_json = "1.0.140"
-tokio = { version = "1.0", features = ["full"] }
+async-trait = "0.1.81"
+axum = {version = "0.7.5", features = ["macros"]}
+axum-extra = { version = "0.9.3", features = ["typed-header"] }
+chrono = {version = "0.4", features = ["serde"]}
+deadpool-diesel = {version ="0.6.1", features = ["postgres"]}
+diesel = { version = "2", features = ["postgres", "chrono"] }
+dotenvy = "0.15"
+jsonwebtoken = "9.3.0"
+serde = { version = "1.0.202", features = ["derive"] }
+serde_json = "1"
+tokio = { version = "1.37.0", features = ["full"] }
+tower = "0.4.13"
+tower-http = {version= "0.5.2", features=["trace", "cors"] }
 tracing = "0.1"
 tracing-subscriber = { version = "0.3", features = ["env-filter"] }
-dotenvy = "0.15.7"
-toml = "0.9.7"
-clap = { version = "4.0", features = ["derive"] }
-pbkdf2 = { version = "0.12", features = ["simple"] }
-rand_core = { version = "0.9.3", features = ["std"] }
-jsonwebtoken = "9"
-once_cell = "1.21.3"
-tower-http = {version= "0.6", features = ["trace", "cors"] }
-tower = "0.5.2"
-rand = "0.9.2"
+once_cell = "1.19.0"
+axum-macros = "0.4.1"

2
Makefile Normal file
View File

@@ -0,0 +1,2 @@
build-schema:
diesel print-schema > src/model/schema.rs

9
diesel.toml Normal file
View File

@@ -0,0 +1,9 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "/data/codes/helios-server-rs/migrations"

View File

@@ -1,22 +0,0 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "1.1.12"
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
"sqlx-postgres", # `DATABASE_DRIVER` feature
]

View File

@@ -1,41 +0,0 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```

View File

@@ -1,24 +0,0 @@
pub use sea_orm_migration::prelude::*;
mod m20250525_000001_create_ledger_table_category;
mod m20250525_000002_create_ledger_table_book;
mod m20250525_000003_create_ledger_table_tag;
mod m20250525_000004_create_ledger_table_account;
mod m20250525_000005_create_ledger_table_transaction;
mod m20250921_000001_create_ledger_table_transaction_tag_rel;
pub struct Migrator;
#[async_trait::async_trait]
impl MigratorTrait for Migrator {
fn migrations() -> Vec<Box<dyn MigrationTrait>> {
vec![
Box::new(m20250525_000001_create_ledger_table_category::Migration),
Box::new(m20250525_000002_create_ledger_table_book::Migration),
Box::new(m20250525_000003_create_ledger_table_tag::Migration),
Box::new(m20250525_000004_create_ledger_table_account::Migration),
Box::new(m20250525_000005_create_ledger_table_transaction::Migration),
Box::new(m20250921_000001_create_ledger_table_transaction_tag_rel::Migration),
]
}
}

View File

@@ -1,64 +0,0 @@
use sea_orm_migration::prelude::*;
pub struct Migration;
impl MigrationName for Migration {
fn name(&self) -> &str {
"m20250525_000001_create_ledger_table_category" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Category::Table)
.col(
ColumnDef::new(Category::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Category::Name).string().not_null())
.col(ColumnDef::new(Category::Uid).big_integer().not_null())
.col(ColumnDef::new(Category::ParentId).big_integer().default(0i64).not_null())
.col(ColumnDef::new(Category::IsDeleted).boolean().default(false).not_null())
.col(
ColumnDef::new(Category::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Category::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the Category table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Category::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Category {
Table,
Id,
Name,
Uid,
ParentId,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,63 +0,0 @@
use sea_orm_migration::prelude::*;
pub struct Migration;
impl MigrationName for Migration {
fn name(&self) -> &str {
"m20250525_000002_create_ledger_table_book" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for Migration {
// Define how to apply this migration
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Book::Table)
.col(
ColumnDef::new(Book::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Book::Name).string().not_null())
.col(ColumnDef::new(Book::Uid).big_integer().not_null())
.col(ColumnDef::new(Book::IsDeleted).boolean().default(false).not_null())
.col(
ColumnDef::new(Book::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Book::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the Book table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Book::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Book {
Table,
Id,
Name,
Uid,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,62 +0,0 @@
use sea_orm_migration::prelude::*;
pub struct Migration;
impl MigrationName for Migration {
fn name(&self) -> &str {
"m20250525_000001_create_ledger_table_tag" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Tag::Table)
.col(
ColumnDef::new(Tag::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Tag::Name).string().not_null())
.col(ColumnDef::new(Tag::Uid).big_integer().not_null())
.col(ColumnDef::new(Tag::IsDeleted).boolean().default(false).not_null())
.col(
ColumnDef::new(Tag::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Tag::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the Tag table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Tag::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Tag {
Table,
Id,
Name,
Uid,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,64 +0,0 @@
use sea_orm_migration::prelude::*;
pub struct Migration;
impl MigrationName for Migration {
fn name(&self) -> &str {
"m20250525_000004_create_ledger_table_account" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Account::Table)
.col(
ColumnDef::new(Account::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Account::Name).string().not_null())
.col(ColumnDef::new(Account::Type).integer().not_null())
.col(ColumnDef::new(Account::Uid).big_integer().not_null())
.col(ColumnDef::new(Account::IsDeleted).boolean().default(false).not_null())
.col(
ColumnDef::new(Account::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Account::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the Account table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Account::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Account {
Table,
Id,
Name,
Uid,
Type,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,84 +0,0 @@
use sea_orm_migration::prelude::*;
pub struct Migration;
impl MigrationName for Migration {
fn name(&self) -> &str {
"m20250525_000005_create_ledger_table_transaction" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(Transaction::Table)
.col(
ColumnDef::new(Transaction::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(Transaction::Uid).big_integer().not_null())
.col(ColumnDef::new(Transaction::Type).integer().not_null())
.col(ColumnDef::new(Transaction::BookId).big_integer().not_null())
.col(
ColumnDef::new(Transaction::CategoryId)
.big_integer()
.not_null(),
)
.col(ColumnDef::new(Transaction::Description).string().not_null())
.col(
ColumnDef::new(Transaction::TransactionTime)
.timestamp_with_time_zone()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Transaction::IsDeleted)
.boolean()
.default(false)
.not_null(),
)
.col(
ColumnDef::new(Transaction::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(Transaction::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the Transaction table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(Transaction::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum Transaction {
Table,
Id,
Uid,
Type,
BookId,
CategoryId,
Description,
TransactionTime,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,71 +0,0 @@
use crate::sea_query;
use sea_orm_migration::{MigrationName, MigrationTrait, SchemaManager};
use crate::{async_trait, ColumnDef, DbErr, Expr, Iden, Table};
pub struct Migration;
impl MigrationName for crate::m20250921_000001_create_ledger_table_transaction_tag_rel::Migration {
fn name(&self) -> &str {
"m20250921_000001_create_ledger_table_transaction_tag_rel" // Make sure this matches with the file name
}
}
#[async_trait::async_trait]
impl MigrationTrait for crate::m20250921_000001_create_ledger_table_transaction_tag_rel::Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.create_table(
Table::create()
.table(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Table)
.col(
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Id)
.big_integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Uid).big_integer().not_null())
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::TransactionId).big_integer().not_null())
.col(ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::TagId).big_integer().not_null())
.col(
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::IsDeleted)
.boolean()
.default(false)
.not_null(),
)
.col(
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::CreatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.col(
ColumnDef::new(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::UpdatedAt)
.date_time()
.default(Expr::current_timestamp())
.not_null(),
)
.to_owned(),
)
.await
}
// Define how to roll back this migration: drop the TransactionTagRel table.
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(crate::m20250921_000001_create_ledger_table_transaction_tag_rel::TransactionTagRel::Table).to_owned())
.await
}
}
#[derive(Iden)]
pub enum TransactionTagRel {
Table,
Id,
Uid,
TransactionId,
TagId,
IsDeleted,
CreatedAt,
UpdatedAt,
}

View File

@@ -1,6 +0,0 @@
use sea_orm_migration::prelude::*;
#[async_std::main]
async fn main() {
cli::run_cli(migration::Migrator).await;
}

View File

@@ -0,0 +1,6 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -0,0 +1,36 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -0,0 +1,11 @@
-- This file should undo anything in `up.sql`
-- This file should undo anything in `up.sql`
DROP TABLE IF EXISTS "categories";
DROP TABLE IF EXISTS "tags";
DROP TABLE IF EXISTS "books";
DROP TABLE IF EXISTS "transactions";
DROP TABLE IF EXISTS "transaction_tag_rels";
DROP TABLE IF EXISTS "accounts";
DROP TABLE IF EXISTS "amounts";
DROP TABLE IF EXISTS "users";

View File

@@ -0,0 +1,81 @@
-- Your SQL goes here
-- Your SQL goes here
CREATE TABLE "categories" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "tags" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "books" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "transactions" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"book_id" BIGINT NOT NULL,
"description" TEXT NOT NULL,
"category_id" BIGINT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"time" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT current_timestamp,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "transaction_tag_rels" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"transaction_id" BIGINT NOT NULL,
"tag_id" BIGINT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "accounts" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"type" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "amounts" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"transaction_id" BIGINT NOT NULL,
"value" BIGINT NOT NULL DEFAULT 0,
"expo" BIGINT NOT NULL DEFAULT 5,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "users" (
"id" BIGSERIAL PRIMARY KEY,
"username" TEXT NOT NULL,
"password" TEXT NOT NULL,
"mail" TEXT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
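For reference, the Makefile's `build-schema` target regenerates the Diesel schema from these tables via `diesel print-schema`. A minimal sketch of the `table!` block that command would plausibly emit for `categories`, inferred from the DDL above (illustrative only, not part of the diff):

```rust
// Sketch: approximate `diesel print-schema` output for the `categories`
// table created in up.sql above (BIGSERIAL -> Int8, TEXT -> Text, etc.).
diesel::table! {
    categories (id) {
        id -> Int8,
        uid -> Int8,
        name -> Text,
        is_delete -> Bool,
        create_at -> Timestamp,
        update_at -> Timestamp,
    }
}
```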

View File

@@ -1,62 +0,0 @@
use crate::middleware::auth::Claims;
use crate::model::db::account::{
ActiveModel as AccountActiveModel, Column as AccountColumn, Model as AccountModel,
};
use crate::model::db::prelude::Account as AccountPrelude;
use crate::model::http_body::account::{AccountReq, AccountResp};
use crate::model::http_body::common::SimpleResponse;
use crate::AppState;
use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::routing::{get, post};
use axum::{Json, Router};
use sea_orm::sqlx::types::chrono::Local;
use sea_orm::{ActiveModelTrait, DbErr, Iden, Set};
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/{id}/update", post(update_account_handler))
.route("/{id}", get(get_account_by_id_handler))
.route(
"/",
post(create_account_handler).get(get_all_accounts_handler),
)
}
async fn update_account_handler(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
Json(payload): Json<AccountReq>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let mut active_model: AccountActiveModel = AccountPrelude::find_by_id(id)
.filter(AccountColumn::Uid.eq(uid))
.filter(AccountColumn::IsDeleted.eq(false))
.one(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
match payload.name {
Some(n) => {
active_model.name = Set(n);
}
_ => {}
}
active_model.updated_at = Set(Local::now().naive_utc());
active_model.update(&state.conn).await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let resp = SimpleResponse{
code: 0,
message: "".to_string()
};
Ok(Json(resp))
}
async fn get_account_by_id_handler() {}
async fn create_account_handler() {}
async fn get_all_accounts_handler() {}

View File

@@ -1,171 +0,0 @@
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use crate::middleware::auth::Claims;
use crate::model::db::book::ActiveModel as BookActiveModel;
use crate::model::db::book::Column as BookColumn;
use crate::model::db::book::Model as BookModel;
use crate::model::db::prelude::Book;
use crate::model::http_body::book;
use crate::model::http_body::book::{BookInfo, BookResp};
use crate::model::http_body::common::SimpleResponse;
use crate::AppState;
use sea_orm::sqlx::types::chrono::Local;
use sea_orm::{entity::*, query::*};
use sea_orm::{ColumnTrait};
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/{id}/update",post(update_book_handler))
.route("/{id}",get(get_book_by_id_handler))
.route("/", post(create_book_handler).get(get_all_books_handler))
}
// handlers
//
#[debug_handler]
async fn get_all_books_handler(
state: State<AppState>,
claims: Claims,
) -> Result<Json<Vec<book::BookResp>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let all_books = Book::find()
.filter(BookColumn::Uid.eq(uid))
.all(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let mut books: Vec<BookResp> = Vec::new();
for b in all_books {
let book_resp = BookResp {
id: b.id.into(),
name: b.name,
};
books.push(book_resp);
}
Ok(Json(books))
}
#[debug_handler]
async fn get_book_by_id_handler(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
) -> Result<Json<BookResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let book_query = Book::find()
.filter(BookColumn::Uid.eq(uid))
.filter(BookColumn::Id.eq(id))
.one(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let book_resp: BookResp;
match book_query {
Some(b) => {
book_resp = BookResp {
id: b.id.into(),
name: b.name,
};
}
_ => {
return Err((StatusCode::NOT_FOUND, "not_found".to_string()));
}
}
Ok(Json(book_resp))
}
#[debug_handler]
async fn create_book_handler(
state: State<AppState>,
claims: Claims,
Json(payload): Json<BookInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let book = BookActiveModel {
name: Set(payload.name.clone().to_owned()),
uid: Set(uid.to_owned()),
..Default::default()
};
let res = Book::insert(book).exec(&state.conn).await;
let mut err_code: i64 = 0;
let mut msg: String;
match res {
Ok(_) => {
err_code = 0;
msg = "ok".to_owned();
}
Err(e) => {
err_code = 0;
msg = e.to_string();
}
}
let resp = SimpleResponse {
code: err_code,
message: msg,
};
Ok(Json(resp))
}
#[debug_handler]
async fn update_book_handler(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
Json(payload): Json<BookInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let exist_book = Book::find()
.filter(BookColumn::Uid.eq(uid))
.filter(BookColumn::Id.eq(id))
.one(&state.conn)
.await;
let book: BookModel;
let mut resp = SimpleResponse {
code: 0,
message: "ok".to_owned(),
};
match exist_book {
Ok(b) => match b {
Some(bk) => {
book = bk;
}
_ => return Err((StatusCode::NOT_FOUND, "not_found".to_string())),
},
Err(_) => {
resp.code = 1;
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"connection_error".to_string(),
));
}
}
let mut book_active_model: BookActiveModel = book.into();
book_active_model.name = Set(payload.name.clone());
book_active_model.updated_at = Set(Local::now().naive_utc());
let update_res = book_active_model.update(&state.conn).await;
match update_res {
Ok(_) => {
resp.code = 0;
resp.message = "ok".to_owned();
}
Err(_) => {
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"book_update_fail".to_string(),
));
}
}
Ok(Json(resp))
}

View File

@@ -1,198 +0,0 @@
// #[cfg(test)]
// mod tests {
// use super::*;
// use axum::{
// http::{Request, StatusCode},
// Router,
// routing::{get, put},
// body::Body,
// };
// use sea_orm::{
// MockDatabase, MockExecResult, DatabaseConnection, DatabaseTransaction,
// entity::prelude::*,
// QueryFilter, Condition, DbErr, EntityTrait,
// };
// use serde_json::{json, Value};
// use tower::ServiceExt;
// use std::sync::Arc;
//
// // Mock Book entity
// #[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
// #[sea_orm(table_name = "books")]
// pub struct Model {
// #[sea_orm(primary_key)]
// pub id: i32,
// pub title: String,
// pub author: String,
// }
//
// #[derive(Copy, Clone, Debug, EnumIter)]
// pub enum Relation {}
//
// impl Related<super::book::Entity> for Entity {
// fn to() -> RelationDef {
// panic!("No relations defined")
// }
// }
//
// // Build a Router for testing
// async fn setup_router(db: DatabaseConnection) -> Router {
// Router::new()
// .route("/books/:id", get(get_book_by_id).put(update_book_by_id))
// .route("/books", get(get_all_book))
// .with_state(Arc::new(db))
// }
//
// // Test get_book_by_id
// #[tokio::test]
// async fn test_get_book_by_id() {
// // Set up the mock database
// let db = MockDatabase::new(DatabaseBackend::Postgres)
// .append_query_results(vec![vec![Model {
// id: 1,
// title: "Test Book".to_string(),
// author: "Test Author".to_string(),
// }]])
// .into_connection();
//
// let app = setup_router(db).await;
//
// // Build the request
// let request = Request::builder()
// .uri("/books/1")
// .method("GET")
// .body(Body::empty())
// .unwrap();
//
// // Send the request
// let response = app.oneshot(request).await.unwrap();
// assert_eq!(response.status(), StatusCode::OK);
//
// // Parse the response
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
// let body: Value = serde_json::from_slice(&body).unwrap();
// assert_eq!(
// body,
// json!({
// "id": 1,
// "title": "Test Book",
// "author": "Test Author"
// })
// );
// }
//
// // Test get_book_by_id when the book is not found
// #[tokio::test]
// async fn test_get_book_by_id_not_found() {
// let db = MockDatabase::new(DatabaseBackend::Postgres)
// .append_query_results(vec![vec![] as Vec<Model>])
// .into_connection();
//
// let app = setup_router(db).await;
//
// let request = Request::builder()
// .uri("/books/999")
// .method("GET")
// .body(Body::empty())
// .unwrap();
//
// let response = app.oneshot(request).await.unwrap();
// assert_eq!(response.status(), StatusCode::NOT_FOUND);
// }
//
// // Test update_book_by_id
// #[tokio::test]
// async fn test_update_book_by_id() {
// let db = MockDatabase::new(DatabaseBackend::Postgres)
// .append_query_results(vec![vec![Model {
// id: 1,
// title: "Updated Book".to_string(),
// author: "Updated Author".to_string(),
// }]])
// .append_exec_results(vec![MockExecResult {
// last_insert_id: 1,
// rows_affected: 1,
// }])
// .into_connection();
//
// let app = setup_router(db).await;
//
// // Build the request
// let request = Request::builder()
// .uri("/books/1")
// .method("PUT")
// .header("Content-Type", "application/json")
// .body(Body::from(
// json!({
// "title": "Updated Book",
// "author": "Updated Author"
// })
// .to_string(),
// ))
// .unwrap();
//
// // Send the request
// let response = app.oneshot(request).await.unwrap();
// assert_eq!(response.status(), StatusCode::OK);
//
// // Parse the response
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
// let body: Value = serde_json::from_slice(&body).unwrap();
// assert_eq!(
// body,
// json!({
// "id": 1,
// "title": "Updated Book",
// "author": "Updated Author"
// })
// );
// }
//
// // Test get_all_book
// #[tokio::test]
// async fn test_get_all_book() {
// let db = MockDatabase::new(DatabaseBackend::Postgres)
// .append_query_results(vec![vec![
// Model {
// id: 1,
// title: "Book 1".to_string(),
// author: "Author 1".to_string(),
// },
// Model {
// id: 2,
// title: "Book 2".to_string(),
// author: "Author 2".to_string(),
// },
// ]])
// .into_connection();
//
// let app = setup_router(db).await;
//
// let request = Request::builder()
// .uri("/books")
// .method("GET")
// .body(Body::empty())
// .unwrap();
//
// let response = app.oneshot(request).await.unwrap();
// assert_eq!(response.status(), StatusCode::OK);
//
// let body = hyper::body::to_bytes(response.into_body()).await.unwrap();
// let body: Value = serde_json::from_slice(&body).unwrap();
// assert_eq!(
// body,
// json!([
// {
// "id": 1,
// "title": "Book 1",
// "author": "Author 1"
// },
// {
// "id": 2,
// "title": "Book 2",
// "author": "Author 2"
// }
// ])
// );
// }
// }

View File

@@ -1,217 +0,0 @@
use crate::api::category;
use crate::middleware::auth::Claims;
use crate::model::db::prelude::Category;
use crate::model::db::{
category::ActiveModel as CategoryActiveModel, category::Column as CategoryColumn,
category::Model as CategoryModel,
};
use crate::model::http_body::category::CategoryInfo;
use crate::model::http_body::common::{OptionalI64, SimpleResponse};
use crate::AppState;
use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::routing::{get, post};
use axum::{Json, Router};
use axum_macros::debug_handler;
use sea_orm::sqlx::types::chrono::Local;
use sea_orm::QueryFilter;
use sea_orm::{entity::*, query::*};
use tokio::join;
pub fn get_nested_handlers() -> Router<crate::AppState> {
Router::new()
.route("/{id}/update", post(update_category_by_id))
.route("/{id}", get(get_category_by_id))
.route("/", post(create_category).get(get_all_categories))
}
#[debug_handler]
async fn get_all_categories(
state: State<AppState>,
claims: Claims,
) -> Result<Json<Vec<CategoryInfo>>, (StatusCode, String)> {
let uid = claims.uid.clone();
let categories_query = Category::find()
.filter(CategoryColumn::Uid.eq(uid))
.all(&state.conn)
.await;
let category_models = match categories_query {
Ok(categories) => categories,
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
};
let mut category_resp: Vec<CategoryInfo> = Vec::new();
for category in category_models {
let category_info = CategoryInfo {
id: category.id.into(),
name: category.name,
parent_id: category.parent_id.into(),
};
category_resp.push(category_info);
}
Ok(Json(category_resp))
}
#[debug_handler]
async fn create_category(
state: State<AppState>,
claims: Claims,
payload: Json<CategoryInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid = claims.uid.clone();
let parent_id: i64 = match payload.parent_id {
OptionalI64(pid_opt) => pid_opt.unwrap_or_else(|| 0),
};
let category_active_model = CategoryActiveModel {
name: Set(payload.name.clone()),
uid: Set(uid),
parent_id: Set(parent_id),
..Default::default()
};
let insert_res = Category::insert(category_active_model)
.exec(&state.conn)
.await;
match insert_res {
Ok(_) => Ok(Json(SimpleResponse {
code: 0,
message: "success".to_string(),
})),
Err(e) => Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
}
}
#[debug_handler]
async fn get_category_by_id(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
) -> Result<Json<CategoryInfo>, (StatusCode, String)> {
let uid = claims.uid.clone();
let category_query_res = Category::find()
.filter(CategoryColumn::Uid.eq(uid))
.filter(CategoryColumn::Id.eq(id))
.one(&state.conn)
.await;
let category_query: CategoryModel = match category_query_res {
Ok(r) => match r {
Some(res) => res,
None => return Err((StatusCode::NOT_FOUND, "not found".to_string())),
},
Err(e) => return Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string())),
};
let category_resp = CategoryInfo {
id: category_query.id.into(),
name: category_query.name.clone(),
parent_id: category_query.parent_id.into(),
};
Ok(Json(category_resp))
}
#[debug_handler]
async fn update_category_by_id(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
payload: Json<CategoryInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid = claims.uid.clone();
let mut parent_category_required = false;
let mut parent_id: i64 = 0;
let category_query = Category::find()
.filter(CategoryColumn::Uid.eq(uid))
.filter(CategoryColumn::Id.eq(id))
.one(&state.conn);
let parent_query = match payload.parent_id {
OptionalI64(Some(cid)) => {
if cid > 0 {
parent_category_required = true;
parent_id = cid;
Some(
Category::find()
.filter(CategoryColumn::Uid.eq(uid))
.filter(CategoryColumn::ParentId.eq(cid))
.one(&state.conn),
)
} else {
None
}
}
OptionalI64(None) => None,
};
let (category_result, parent_result) = if let Some(parent_query) = parent_query {
// Run both queries concurrently
let (category, parent) = join!(category_query, parent_query);
// Handle the query results
(
category.map_err(|e| {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Database error: {}", e),
)
})?,
parent.map_err(|e| {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Database error: {}", e),
)
})?,
)
} else {
// Only query the category
(
category_query.await.map_err(|e| {
(
StatusCode::INTERNAL_SERVER_ERROR,
format!("Database error: {}", e),
)
})?,
None,
)
};
let category = match category_result {
Some(category) => {
category
}
None => {
return Err((StatusCode::NOT_FOUND, "Category not found".to_string()));
}
};
let parent_category_valid = match parent_result {
Some(_) => { true }
None => { false }
};
let mut resp = SimpleResponse {
code: 0,
message: "success".to_string(),
};
let mut category_active_model : CategoryActiveModel = category.into();
category_active_model.name = Set(payload.name.clone());
category_active_model.updated_at = Set(Local::now().naive_utc());
if parent_category_required && parent_id > 0 {
category_active_model.parent_id = Set(parent_id.into());
}
let update_res = category_active_model.update(&state.conn).await;
match update_res {
Ok(_) => {
resp.code = 0;
resp.message = "ok".to_owned();
}
Err(_) => {
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"category update failed".to_string(),
));
}
}
if parent_category_required && !parent_category_valid {
resp.code = 1;
resp.message = "Parent category not found".to_string();
return Err((StatusCode::NOT_FOUND, "Parent category not found".to_string()));
}
Ok(Json(resp))
}

View File

@@ -1,5 +0,0 @@
pub mod book;
pub mod category;
pub mod tag;
pub mod transaction;
pub mod account;

View File

@@ -1,170 +0,0 @@
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use crate::middleware::auth::Claims;
use crate::model::db::tag::ActiveModel as TagActiveModel;
use crate::model::db::tag::Column as TagColumn;
use crate::model::db::tag::Model as TagModel;
use crate::model::db::prelude::Tag;
use crate::model::http_body::tag::{TagInfo, TagResp};
use crate::model::http_body::common::SimpleResponse;
use crate::AppState;
use sea_orm::sqlx::types::chrono::Local;
use sea_orm::{entity::*, query::*};
use sea_orm::{ColumnTrait};
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/{id}/update",post(update_tag))
.route("/{id}",get(get_tag_by_id))
.route("/", post(create_tag).get(get_all_tags))
}
// handlers
//
#[debug_handler]
async fn get_all_tags(
state: State<AppState>,
claims: Claims,
) -> Result<Json<Vec<TagResp>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let all_tags = Tag::find()
.filter(TagColumn::Uid.eq(uid))
.all(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let mut tags: Vec<TagResp> = Vec::new();
for b in all_tags {
let tag_resp = TagResp {
id: b.id.into(),
name: b.name,
};
tags.push(tag_resp);
}
Ok(Json(tags))
}
#[debug_handler]
async fn get_tag_by_id(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
) -> Result<Json<TagResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let tag_query = Tag::find()
.filter(TagColumn::Uid.eq(uid))
.filter(TagColumn::Id.eq(id))
.one(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let tag_resp: TagResp;
match tag_query {
Some(b) => {
tag_resp = TagResp {
id: b.id.into(),
name: b.name,
};
}
_ => {
return Err((StatusCode::NOT_FOUND, "not_found".to_string()));
}
}
Ok(Json(tag_resp))
}
#[debug_handler]
async fn create_tag(
state: State<AppState>,
claims: Claims,
Json(payload): Json<TagInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let tag = TagActiveModel {
name: Set(payload.name.clone().to_owned()),
uid: Set(uid.to_owned()),
..Default::default()
};
let res = Tag::insert(tag).exec(&state.conn).await;
let mut err_code: i64 = 0;
let mut msg: String;
match res {
Ok(_) => {
err_code = 0;
msg = "ok".to_owned();
}
Err(e) => {
err_code = 0;
msg = e.to_string();
}
}
let resp = SimpleResponse {
code: err_code,
message: msg,
};
Ok(Json(resp))
}
#[debug_handler]
async fn update_tag(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
Json(payload): Json<TagInfo>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let exist_tag = Tag::find()
.filter(TagColumn::Uid.eq(uid))
.filter(TagColumn::Id.eq(id))
.one(&state.conn)
.await;
let tag: TagModel;
let mut resp = SimpleResponse {
code: 0,
message: "ok".to_owned(),
};
match exist_tag {
Ok(b) => match b {
Some(bk) => {
tag = bk;
}
_ => return Err((StatusCode::NOT_FOUND, "not_found".to_string())),
},
Err(_) => {
resp.code = 1;
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"connection_error".to_string(),
));
}
}
let mut tag_active_model: TagActiveModel = tag.into();
tag_active_model.name = Set(payload.name.clone());
tag_active_model.updated_at = Set(Local::now().naive_utc());
let update_res = tag_active_model.update(&state.conn).await;
match update_res {
Ok(_) => {
resp.code = 0;
resp.message = "ok".to_owned();
}
Err(_) => {
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"tag_update_fail".to_string(),
));
}
}
Ok(Json(resp))
}

View File

@@ -1,217 +0,0 @@
use crate::middleware::auth::Claims;
use crate::model::db::category::Column as CategoryColumn;
use crate::model::db::prelude::Category as CategoryPrelude;
use crate::model::db::prelude::Transaction;
use crate::model::db::transaction::{
ActiveModel as TransactionActiveModel, Column as TransactionColumn, Model as TransactionModel,
};
use crate::model::http_body;
use crate::model::http_body::book::BookInfo;
use std::collections::HashMap;
use crate::model::db::prelude::Tag as TagPrelude;
use crate::model::db::tag::{
ActiveModel as TagActiveModel, Column as TagColumn, Model as TagModel,
};
use crate::model::http_body::category::CategoryResp;
use crate::model::http_body::common::SimpleResponse;
use crate::model::http_body::transaction::{TransactionReq, TransactionResp};
use crate::AppState;
use axum::extract::{Path, State};
use axum::http::StatusCode;
use axum::routing::{get, post};
use axum::{Json, Router};
use axum_macros::debug_handler;
use sea_orm::sqlx::types::chrono::Local;
use sea_orm::{ColumnTrait, DatabaseConnection};
use sea_orm::QueryFilter;
use sea_orm::{entity::*, query::*};
use serde_json::error::Category;
use std::ptr::null;
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/{id}/update", post(update_transaction_handler))
.route("/{id}", get(get_transaction_by_id_handler))
.route(
"/",
post(create_transaction_handler).get(get_all_transactions_handler),
)
}
async fn update_transaction_handler(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
Json(payload): Json<TransactionReq>,
) -> Result<Json<SimpleResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid;
let exist_transaction = Transaction::find()
.filter(TransactionColumn::Id.eq(id))
.filter(TransactionColumn::Uid.eq(uid))
.one(&state.conn)
.await;
let mut resp = SimpleResponse {
code: 0,
message: "".to_string(),
};
let transaction: TransactionModel;
match exist_transaction {
Err(_) => {
resp.code = 1;
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"connection_error".to_string(),
));
}
Ok(tra) => match tra {
Some(tr) => {
transaction = tr;
}
_ => return Err((StatusCode::NOT_FOUND, "Transaction not found".to_string())),
},
}
let mut tr_active: TransactionActiveModel = transaction.into();
match payload.description {
None => {}
Some(input_desc) => {
tr_active.description = Set(input_desc);
}
}
// TODO category
let new_category_id: i64 = match payload.category_id {
None => {
return Err((
StatusCode::BAD_REQUEST,
"category_id is not valid".to_string(),
))
}
Some(cid_string) => match cid_string.parse::<i64>() {
Ok(cid) => cid,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"category_id is not valid".to_string(),
))
}
},
};
let new_category_id_exist = CategoryPrelude::find()
.filter(CategoryColumn::Id.eq(new_category_id))
.filter(CategoryColumn::Uid.eq(uid))
.all(&state.conn)
.await;
match new_category_id_exist {
Ok(_) => {}
Err(_) => {
return Err((StatusCode::NOT_FOUND, "category_id not found".to_string()));
}
}
// TODO tags
let tag_exist = check_tags_exist(&state.conn, payload.tags).await;
let all_tag_exist: bool;
match tag_exist {
Ok(tag_res) => {
all_tag_exist = tag_res.values().all(|&exists| exists);
}
Err(_) => {
return Err((StatusCode::NOT_FOUND, "tag not found".to_string()));
}
}
if !all_tag_exist {
return Err((StatusCode::NOT_FOUND, "tag not found".to_string()));
}
// TODO amounts
// Update
tr_active.updated_at = Set(Local::now().naive_utc());
let update_res = tr_active.update(&state.conn).await;
match update_res {
Ok(_) => {
resp.code = 0;
}
Err(_) => {
return Err((
StatusCode::INTERNAL_SERVER_ERROR,
"transaction_update_failed".to_string(),
));
}
}
Ok(Json(resp))
}
async fn create_transaction_handler() {}
async fn get_transaction_by_id_handler(
Path(id): Path<i64>,
state: State<AppState>,
claims: Claims,
) -> Result<Json<TransactionResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let transaction_query = Transaction::find()
.filter(TransactionColumn::Uid.eq(uid))
.filter(TransactionColumn::Id.eq(id))
.one(&state.conn)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
let response: TransactionResp;
match transaction_query {
None => {
return Err((StatusCode::NOT_FOUND, "Transaction not found".to_string()));
}
Some(x) => {
response = TransactionResp {
id: x.id,
description: x.description,
category: CategoryResp {
id: 0,
name: "".to_string(),
parent_id: 0,
},
tags: vec![],
}
}
};
Ok(Json(response))
}
async fn get_all_transactions_handler() {}
// Batch-check whether the given TagModels exist
async fn check_tags_exist(
connection: &DatabaseConnection,
ids: Vec<String>,
) -> Result<HashMap<i64, bool>, String> {
// Convert Vec<String> to Vec<i64>, handling possible parse errors
let ids_i64: Vec<i64> = ids
.into_iter()
.filter_map(|id| id.parse::<i64>().ok())
.collect();
if ids_i64.is_empty() {
return Ok(HashMap::new());
}
// Build the IN query condition
let condition = Condition::any().add(TagColumn::Id.is_in(ids_i64.clone()));
// Run the batch query to fetch the TagModels that exist
let found_tags = TagPrelude::find()
.filter(condition)
.all(connection)
.await
.map_err(|e| format!("Database error: {}", e))?;
// Build a HashMap recording whether each ID exists
let mut result = HashMap::new();
for id in ids_i64 {
result.insert(id, found_tags.iter().any(|tag| tag.id == id));
}
Ok(result)
}

151
src/category/handler.rs Normal file
View File

@@ -0,0 +1,151 @@
// use std::sync::Arc;
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::prelude::*;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::model::db_model;
use crate::model::schema;
use crate::util;
// use crate::model::schema::categories::dsl::categories;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
#[derive(Serialize)]
pub struct CreateCategoryResponse {
id: i64,
name: String,
}
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/", post(create_category).get(get_all_categories))
.route("/:id", post(update_category).get(get_category))
}
#[derive(Deserialize)]
pub struct CreateCategoryRequest {
name: String,
}
#[debug_handler]
pub async fn create_category(
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateCategoryRequest>,
) -> Result<Json<db_model::Category>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone(); // TODO replace with actual user id.
// let ret = CreateCategoryResponse{id: 134132413541, name: "24532452".to_string()};
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let new_category = db_model::CategoryForm {
name: payload.name,
uid,
};
let res = conn
.interact(move |conn| {
diesel::insert_into(schema::categories::table)
.values(&new_category)
.returning(db_model::Category::as_returning())
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
// let ret = CreateCategoryResponse{id: res.id, name: res.name};
Ok(Json(res))
}
pub async fn update_category(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateCategoryRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone(); // TODO replace with actual user id.
// let ret = CreateCategoryResponse{id: 134132413541, name: "24532452".to_string()};
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let now = Utc::now().naive_utc();
let res = conn
.interact(move |conn| {
diesel::update(schema::categories::table)
.filter(schema::categories::id.eq(id))
.filter(schema::categories::uid.eq(uid))
.set((
schema::categories::name.eq(payload.name),
schema::categories::update_at.eq(now),
))
.execute(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
// let ret = CreateCategoryResponse{id: res.id, name: res.name};
let resp = util::req::CommonResp { code: 0 };
Ok(Json(resp))
}
pub async fn get_category(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<db_model::Category>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::categories::table
.filter(schema::categories::id.eq(id))
.filter(schema::categories::uid.eq(uid))
.select(db_model::Category::as_select())
.limit(1)
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn get_all_categories(
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<Vec<db_model::Category>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::categories::table
.filter(schema::categories::uid.eq(uid))
.select(db_model::Category::as_select())
.load(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
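The handlers above rely on `db_model::Category` and `db_model::CategoryForm`, which are not shown in this diff. A hedged sketch of what those Diesel model types would plausibly look like, given the `categories` table in up.sql and the `as_select()` / `as_returning()` calls (field names and derives are assumptions, not the repository's actual code):

```rust
use chrono::NaiveDateTime;
use diesel::prelude::*;
use serde::Serialize;

// Assumed read model: queryable/selectable and serialized straight to JSON by the handlers.
#[derive(Queryable, Selectable, Serialize)]
#[diesel(table_name = crate::model::schema::categories)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Category {
    pub id: i64,
    pub uid: i64,
    pub name: String,
    pub is_delete: bool,
    pub create_at: NaiveDateTime,
    pub update_at: NaiveDateTime,
}

// Assumed insert form: only the columns create_category supplies; the rest use column defaults.
#[derive(Insertable)]
#[diesel(table_name = crate::model::schema::categories)]
pub struct CategoryForm {
    pub name: String,
    pub uid: i64,
}
```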

1
src/category/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod handler;

View File

@@ -1 +0,0 @@
pub mod transaction;

View File

View File

@@ -1 +0,0 @@
pub mod transaction_tag_rel;

View File

@@ -1,130 +0,0 @@
use sea_orm::{DatabaseConnection, EntityTrait, ActiveModelTrait, Set, QueryFilter, ColumnTrait, DbErr};
use sea_orm::sqlx::types::chrono::{Local};
use crate::model::db::prelude::TransactionTagRel as TransactionTagRelPrelude;
use crate::model::db::transaction_tag_rel::{
ActiveModel as TransactionTagRelActiveModel,
Model as TransactionTagRelModel,
Column as TransactionTagRelColumn,
};
// DAO struct for TransactionTagRel
pub struct TransactionTagRelDAO {
db: DatabaseConnection,
}
impl TransactionTagRelDAO {
// Constructor
pub fn new(db: DatabaseConnection) -> Self {
Self { db }
}
// Create a new TransactionTagRel
pub async fn create(
&self,
uid: i64,
transaction_id: i64,
tag_id: i64,
) -> Result<TransactionTagRelModel, DbErr> {
let active_model = TransactionTagRelActiveModel {
uid: Set(uid),
transaction_id: Set(transaction_id),
tag_id: Set(tag_id),
is_deleted: Set(false),
// created_at: Set(Utc::now()),
// updated_at: Set(Utc::now()),
..Default::default() // id is auto-incremented
};
active_model.insert(&self.db).await
}
// Find by ID
pub async fn find_by_id(&self, id: i64, uid:i64) -> Result<Option<TransactionTagRelModel>, DbErr> {
TransactionTagRelPrelude::find_by_id(id)
.filter(TransactionTagRelColumn::Uid.eq(uid))
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
.one(&self.db)
.await
}
// Find all by transaction_id
pub async fn find_by_transaction_id(
&self,
transaction_id: i64,
uid: i64,
) -> Result<Vec<TransactionTagRelModel>, DbErr> {
TransactionTagRelPrelude::find()
.filter(TransactionTagRelColumn::Uid.eq(uid))
.filter(TransactionTagRelColumn::TransactionId.eq(transaction_id))
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
.all(&self.db)
.await
}
// Find all by tag_id
pub async fn find_by_tag_id(
&self,
tag_id: i64,
uid: i64,
) -> Result<Vec<TransactionTagRelModel>, DbErr> {
TransactionTagRelPrelude::find()
.filter(TransactionTagRelColumn::Uid.eq(uid))
.filter(TransactionTagRelColumn::TagId.eq(tag_id))
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
.all(&self.db)
.await
}
// Update a TransactionTagRel
pub async fn update(
&self,
id: i64,
uid: Option<i64>,
transaction_id: Option<i64>,
tag_id: Option<i64>,
) -> Result<TransactionTagRelModel, DbErr> {
let mut active_model: TransactionTagRelActiveModel = TransactionTagRelPrelude::find_by_id(id)
.filter(TransactionTagRelColumn::Uid.eq(uid))
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
.one(&self.db)
.await?
.ok_or(DbErr::RecordNotFound("TransactionTagRel not found".into()))?
.into();
if let Some(uid) = uid {
active_model.uid = Set(uid);
}
if let Some(transaction_id) = transaction_id {
active_model.transaction_id = Set(transaction_id);
}
if let Some(tag_id) = tag_id {
active_model.tag_id = Set(tag_id);
}
active_model.updated_at = Set(Local::now().naive_utc());
active_model.update(&self.db).await
}
// Soft delete (set is_deleted = true)
pub async fn soft_delete(&self, id: i64, uid: i64) -> Result<(), DbErr> {
let mut active_model: TransactionTagRelActiveModel = TransactionTagRelPrelude::find_by_id(id)
.filter(TransactionTagRelColumn::Uid.eq(uid))
.filter(TransactionTagRelColumn::IsDeleted.eq(false))
.one(&self.db)
.await?
.ok_or(DbErr::RecordNotFound("TransactionTagRel not found".into()))?
.into();
active_model.is_deleted = Set(true);
active_model.updated_at = Set(Local::now().naive_utc());
active_model.update(&self.db).await?;
Ok(())
}
// Hard delete (optional, use with caution)
pub async fn hard_delete(&self, id: i64, uid: i64) -> Result<(), DbErr> {
TransactionTagRelPrelude::delete_by_id(id)
.exec(&self.db).await?;
Ok(())
}
}

View File

@@ -1,7 +1,10 @@
-use crate::middleware::auth;
-use axum::{http::Method, Router};
-use clap::Parser;
-use sea_orm::{Database, DatabaseConnection, Iden};
+use axum::{
+    // http::StatusCode,
+    // routing::{get, post},
+    // Json,
+    Router,
+};
+use axum::http::Method;
 use serde::{Deserialize, Serialize};
 use tower::ServiceBuilder;
 use tower_http::cors::{Any, CorsLayer};
@@ -10,99 +13,34 @@ use tracing::info;
 use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
 // Project modules
-mod api;
+mod category;
 mod middleware;
 mod model;
 mod util;
-mod query;
-mod dao;
-mod dal;
+// Passed App State
+#[derive(Clone)]
+pub struct AppState {
+    db: deadpool_diesel::postgres::Pool,
+}
 #[tokio::main]
 async fn main() {
-    // dotenvy::dotenv().unwrap();
+    dotenvy::dotenv().unwrap();
+    // initialize tracing
     tracing_subscriber::registry()
         .with(tracing_subscriber::fmt::layer())
         .init();
-    let cli = Cli::parse();
-    match cli.command {
-        Command::Serve { config_path } => {
-            if let Ok(config) = load_config(&config_path).await {
-                println!("Loaded config.");
-                println!("{},{}", config.service.host.clone(), config.service.port);
-                // Proceed with server initialization using `config`
-                start_server(&config).await;
-            } else {
-                eprintln!("Failed to load config from {}", config_path);
-            }
-        }
-        Command::PrintExampleConfig {}=>{
-            print_default_config().await;
-        }
-    }
-}
-#[derive(Clone)]
-struct AppState {
-    conn: DatabaseConnection,
-}
-#[derive(Deserialize,Serialize)]
-struct Key {
-    jwt: String,
-    user: String,
-}
-#[derive(Deserialize,Serialize)]
-struct DatabaseConf {
-    connection: String,
-}
-#[derive(Deserialize,Serialize)]
-struct ServiceConf {
-    host: String,
-    port: u32,
-}
-#[derive(Deserialize,Serialize)]
-struct Config {
-    service: ServiceConf,
-    database: DatabaseConf,
-    keys: Key,
-}
-#[derive(clap::Parser)]
-struct Cli {
-    #[command(subcommand)]
-    command: Command,
-}
-#[derive(clap::Subcommand)]
-enum Command {
-    Serve {
-        #[arg(long = "conf")]
-        config_path: String,
-    },
-    PrintExampleConfig {},
-}
-async fn load_config(path: &str) -> Result<Config, Box<dyn std::error::Error>> {
-    let content = tokio::fs::read_to_string(path).await?;
-    let config: Config = toml::from_str(&content)?;
-    Ok(config)
-}
-// ====== Commands ======
-// start http server
-async fn start_server(config: &Config) {
-    let conn = Database::connect(&config.database.connection)
-        .await
-        .expect("Database connection failed.");
-    auth::initialize_jwt_key(config.keys.jwt.clone());
-    let state = AppState { conn };
-    // Build router
+    // initialize db connection
+    let db_url = std::env::var("DATABASE_URL").unwrap();
+    let manager = deadpool_diesel::postgres::Manager::new(db_url, deadpool_diesel::Runtime::Tokio1);
+    let pool = deadpool_diesel::postgres::Pool::builder(manager)
+        .build()
+        .unwrap();
+    let shared_state = AppState { db: pool };
+    // Register routers
     let cors_layer = CorsLayer::new()
         .allow_methods([Method::GET, Method::POST])
         .allow_origin(Any);
@@ -111,36 +49,13 @@ async fn start_server(config: &Config) {
         .layer(cors_layer);
     let app = Router::new()
-        .nest("/api/v1/book", api::book::get_nest_handlers())
-        .nest("/api/v1/category", api::category::get_nested_handlers())
-        .nest("/api/v1/tag", api::tag::get_nest_handlers())
-        .nest("/api/v1/transaction", api::transaction::get_nest_handlers())
-        .with_state(state)
+        // V1 apis
+        .nest("/api/v1/category", category::handler::get_nest_handlers())
+        .nest("/api/v1/v2", category::handler::get_nest_handlers())
+        .with_state(shared_state)
         .layer(global_layer);
-    let host = config.service.host.clone();
-    let port = config.service.port;
-    let server_url = format!("{host}:{port}");
-    let listener = tokio::net::TcpListener::bind(&server_url).await.unwrap();
-    axum::serve(listener, app)
-        .await
-        .expect("Service panic happened");
-}
-async fn print_default_config() {
-    let example_conf = Config{
-        service: ServiceConf {
-            host: "localhost".to_string(),
-            port: 8080,
-        },
-        database: DatabaseConf {
-            connection: "postgres://postgres:postgres@localhost/test_db".to_string(),
-        },
-        keys: Key {
-            jwt: "THIS_IS_TEST_CONFIG".to_string(),
-            user: "test_user".to_string(),
-        },
-    };
-    // Serialize to a TOML string
-    let toml_string = toml::to_string(&example_conf);
-    println!("#This is an example config.\n{}", toml_string.unwrap());
+    let listener = tokio::net::TcpListener::bind("0.0.0.0:8987").await.unwrap();
+    info!("starting server on 0.0.0.0:8987");
+    axum::serve(listener, app).await.unwrap();
 }

View File

@@ -1,23 +1,28 @@
 use axum::{
+    async_trait,
     extract::FromRequestParts,
-    http::{request::Parts, StatusCode},
-    response::{IntoResponse, Response},
+    http::{
+        request::Parts,
+        StatusCode,
+    },
     Json, RequestPartsExt,
+    response::{IntoResponse, Response},
 };
 use axum_extra::{
     headers::{authorization::Bearer, Authorization},
     TypedHeader,
 };
-use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
 use serde::{Deserialize, Serialize};
 use serde_json::json;
+use jsonwebtoken::{decode,encode, DecodingKey, EncodingKey, Header, Validation};
 use std::fmt::Display;
-use std::sync::OnceLock;
+use once_cell::sync::Lazy;
+use crate::util;
 #[derive(Debug, Serialize, Deserialize)]
 pub struct Claims {
     sub: String,
-    // company: String,
+    company: String,
     exp: usize,
     pub uid: i64,
 }
@@ -42,15 +47,10 @@ pub enum AuthError {
     InvalidToken,
 }
-static KEYS: OnceLock<Keys> = OnceLock::new();
-pub fn initialize_jwt_key(key_str: String) {
-    let res = KEYS.set(Keys::new(key_str.as_bytes()));
-    match res {
-        Ok(_) => {}
-        Err(_) => panic!("jwt key initialize failed"),
-    }
-}
+static KEYS: Lazy<Keys> = Lazy::new(|| {
+    let secret = std::env::var("JWT_SECRET").expect("JWT_SECRET must be set");
+    Keys::new(secret.as_bytes())
+});
 struct Keys {
     encoding: EncodingKey,
@@ -68,7 +68,7 @@ impl Keys {
 impl Display for Claims {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "Email: {}", self.sub)
+        write!(f, "Email: {}\nCompany: {}", self.sub, self.company)
     }
 }
@@ -81,29 +81,27 @@ impl AuthBody {
     }
 }
+#[async_trait]
 impl<S> FromRequestParts<S> for Claims
 where
     S: Send + Sync,
 {
-    type Rejection = AuthError;
+    type Rejection = (StatusCode, String);
     async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
         // Extract the token from the authorization header
         let TypedHeader(Authorization(bearer)) = parts
             .extract::<TypedHeader<Authorization<Bearer>>>()
             .await
-            .map_err(|_| AuthError::InvalidToken)?;
+            .map_err(util::req::internal_error)?;
         // Decode the user data
-        let token_data = decode::<Claims>(
-            bearer.token(),
-            &KEYS.get().unwrap().decoding,
-            &Validation::default(),
-        )
-        .map_err(|_| AuthError::InvalidToken)?;
+        let token_data = decode::<Claims>(bearer.token(), &KEYS.decoding, &Validation::default())
+            .map_err(util::req::internal_error)?;
         Ok(token_data.claims)
     }
 }
 impl IntoResponse for AuthError {
     fn into_response(self) -> Response {
         let (status, error_message) = match self {
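A rough sketch of the token round trip both versions of auth.rs implement with jsonwebtoken: encode a Claims value with a shared secret, then decode and validate it the way the FromRequestParts extractor does. The standalone Claims struct, the secret, and the expiry value here are illustrative assumptions; the real code takes the key from the config file (old side) or the JWT_SECRET environment variable (new side).

use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};

// Stand-in for the Claims struct above; the company field is omitted for brevity.
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    sub: String,
    exp: usize,
    uid: i64,
}

fn main() -> Result<(), jsonwebtoken::errors::Error> {
    let secret = b"THIS_IS_TEST_CONFIG"; // placeholder secret for the sketch
    let claims = Claims {
        sub: "user@example.com".to_string(),
        exp: 2_000_000_000, // unix timestamp; must be in the future for Validation::default()
        uid: 1,
    };

    // Issue a token, as a login handler would.
    let token = encode(&Header::default(), &claims, &EncodingKey::from_secret(secret))?;

    // Validate it, as the extractor does for the Bearer header.
    let data = decode::<Claims>(&token, &DecodingKey::from_secret(secret), &Validation::default())?;
    assert_eq!(data.claims.uid, 1);
    Ok(())
}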

View File

@@ -1,22 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "account")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub account_type: i32,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,21 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "book")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,22 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "category")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub parent_id: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}
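For context on the removed side, a hypothetical sketch of how these generated entities were typically queried through SeaORM; the module path and connection string are assumptions, and this is not code from the repository.

use sea_orm::{ColumnTrait, Database, DbErr, EntityTrait, QueryFilter};

// Path assumed from the removed mod.rs (crate::model::db); the prelude aliases Entity as Category.
use crate::model::db::{category, prelude::Category};

async fn list_categories(uid: i64) -> Result<(), DbErr> {
    let db = Database::connect("postgres://postgres:postgres@localhost/test_db").await?;
    // Fetch the caller's categories, skipping soft-deleted rows.
    let rows = Category::find()
        .filter(category::Column::Uid.eq(uid))
        .filter(category::Column::IsDeleted.eq(false))
        .all(&db)
        .await?;
    for row in rows {
        println!("{}: {} (parent {})", row.id, row.name, row.parent_id);
    }
    Ok(())
}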

View File

@@ -1,10 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
pub mod prelude;
pub mod account;
pub mod book;
pub mod category;
pub mod tag;
pub mod transaction;
pub mod transaction_tag_rel;

View File

@@ -1,8 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
pub use super::account::Entity as Account;
pub use super::book::Entity as Book;
pub use super::category::Entity as Category;
pub use super::tag::Entity as Tag;
pub use super::transaction::Entity as Transaction;
pub use super::transaction_tag_rel::Entity as TransactionTagRel;

View File

@@ -1,21 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tag")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,25 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "transaction")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub uid: i64,
pub transaction_type: i32,
pub book_id: i64,
pub category_id: i64,
pub description: String,
pub transaction_time: DateTimeWithTimeZone,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,22 +0,0 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.16
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "transaction_tag_rel")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub uid: i64,
pub transaction_id: i64,
pub tag_id: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

21
src/model/db_model.rs Normal file
View File

@@ -0,0 +1,21 @@
use crate::model::schema;
use diesel::prelude::*;
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::categories)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Category {
id: i64,
uid: i64,
name: String,
is_delete: bool,
create_at: chrono::NaiveDateTime,
update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::categories)]
pub struct CategoryForm {
pub uid: i64,
pub name: String,
}
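A hypothetical usage sketch for the structs above with Diesel; the application hands out connections through a deadpool-diesel pool, but a plain PgConnection keeps the example short.

use diesel::prelude::*;

use crate::model::db_model::{Category, CategoryForm};
use crate::model::schema::categories;

// Insert a new category for a user, then list that user's live categories.
fn create_and_list(conn: &mut PgConnection, uid: i64, name: &str) -> QueryResult<Vec<Category>> {
    let form = CategoryForm { uid, name: name.to_string() };
    diesel::insert_into(categories::table)
        .values(&form)
        .execute(conn)?;

    categories::table
        .filter(categories::uid.eq(uid))
        .filter(categories::is_delete.eq(false))
        .load::<Category>(conn)
}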

View File

@@ -1,14 +0,0 @@
use serde::{Serialize, Deserialize};
#[derive(Serialize)]
pub struct AccountResp {
pub id: String,
pub name: String,
pub account_type: String,
}
#[derive(Deserialize, Serialize, Debug)]
pub struct AccountReq {
pub name: Option<String>,
pub account_type: Option<String>,
}

View File

@@ -1,14 +0,0 @@
use serde::{Serialize, Deserialize};
use super::common::{number_stringify, OptionalI64};
#[derive(Serialize)]
pub struct BookResp {
#[serde(with="number_stringify")]
pub id: OptionalI64,
pub name: String,
}
#[derive(Serialize,Deserialize)]
pub struct BookInfo {
pub name: String,
}

View File

@@ -1,17 +0,0 @@
use serde::{Deserialize, Serialize};
use super::common::{number_stringify, OptionalI64};
#[derive(Serialize, Deserialize)]
pub struct CategoryResp {
pub id: i64,
pub name: String,
pub parent_id: i64,
}
#[derive(Serialize, Deserialize)]
pub struct CategoryInfo {
#[serde(with="number_stringify")]
pub id: OptionalI64,
pub name: String,
#[serde(with="number_stringify")]
pub parent_id: OptionalI64,
}

View File

@@ -1,88 +0,0 @@
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter, Result as FmtResult};
use std::num::ParseIntError;
use std::str::FromStr;
#[derive(Serialize)]
pub struct SimpleResponse {
pub code: i64,
pub message: String,
}
#[derive(Debug)]
pub struct OptionalI64(pub Option<i64>);
impl OptionalI64 {
// Constructor: build Some from an i64
pub fn new(value: i64) -> Self {
OptionalI64(Some(value))
}
// Constructor: build None directly
pub fn none() -> Self {
OptionalI64(None)
}
// Convert from an Option<i64>
pub fn from_option(value: Option<i64>) -> Self {
OptionalI64(value)
}
}
impl From<i64> for OptionalI64 {
fn from(value: i64) -> Self {
OptionalI64(Some(value))
}
}
impl From<Option<i64>> for OptionalI64 {
fn from(value: Option<i64>) -> Self {
OptionalI64(value)
}
}
impl FromStr for OptionalI64 {
type Err = std::num::ParseIntError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.is_empty() || s.eq_ignore_ascii_case("null") {
Ok(OptionalI64(None))
} else {
s.parse::<i64>().map(|n| OptionalI64(Some(n)))
}
}
}
impl Display for OptionalI64 {
fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult {
match self.0 {
Some(num) => write!(f, "{}", num), // print the number when a value is present
None => write!(f, ""), // print an empty string for None
}
}
}
pub mod number_stringify {
use std::fmt::Display;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serializer};
pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
where
T: Display,
S: Serializer,
{
serializer.collect_str(value)
}
pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
where
T: FromStr,
T::Err: Display,
D: Deserializer<'de>,
{
String::deserialize(deserializer)?
.parse()
.map_err(de::Error::custom)
}
}
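A small usage sketch of the helper above, assuming OptionalI64, number_stringify, and the CategoryInfo struct from this diff are in scope; the literal values are made up. The annotated fields are written as JSON strings via Display (None becomes an empty string) and read back through FromStr.

fn main() {
    let info = CategoryInfo {
        id: OptionalI64::new(42),
        name: "Food".to_string(),
        parent_id: OptionalI64::none(),
    };
    // Serializes to {"id":"42","name":"Food","parent_id":""}
    let json = serde_json::to_string(&info).unwrap();
    println!("{json}");

    // "" and "null" deserialize back to None; "42" parses to Some(42).
    let parsed: CategoryInfo = serde_json::from_str(&json).unwrap();
    assert!(parsed.parent_id.0.is_none());
}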

View File

@@ -1,6 +0,0 @@
pub mod book;
pub mod common;
pub mod category;
pub mod tag;
pub mod transaction;
pub mod account;

View File

@@ -1,14 +0,0 @@
use serde::{Serialize, Deserialize};
use super::common::{number_stringify, OptionalI64};
#[derive(Serialize)]
pub struct TagResp {
#[serde(with="number_stringify")]
pub id: OptionalI64,
pub name: String,
}
#[derive(Serialize,Deserialize)]
pub struct TagInfo {
pub name: String,
}

View File

@@ -1,25 +0,0 @@
use serde::{Serialize, Deserialize};
use crate::model::http_body::tag::TagInfo;
use crate::model::http_body::category::CategoryResp;
use crate::model::http_body::common::OptionalI64;
#[derive(Serialize,Deserialize)]
pub struct TransactionResp {
pub id: i64,
pub description: String,
pub category: CategoryResp,
pub tags: Vec<TagInfo>,
}
#[derive(Serialize,Deserialize)]
pub struct TransactionReq {
pub id: Option<String>,
pub description: Option<String>,
pub category_id: Option<String>,
pub tags: Vec<String>,
}
pub struct TransactionAmountReq {
pub id: Option<String>,
pub account_id: Option<String>,
}

View File

@@ -1,2 +1,2 @@
-pub mod db;
-pub mod http_body;
+pub mod db_model;
+pub mod schema;

109
src/model/schema.rs Normal file
View File

@@ -0,0 +1,109 @@
// @generated automatically by Diesel CLI.
diesel::table! {
accounts (id) {
id -> Int8,
uid -> Int8,
name -> Text,
#[sql_name = "type"]
type_ -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
amounts (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
value -> Int8,
expo -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
books (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
categories (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
tags (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
transaction_tag_rels (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
tag_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
transactions (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
description -> Text,
category_id -> Int8,
is_delete -> Bool,
time -> Timestamptz,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
users (id) {
id -> Int8,
username -> Text,
password -> Text,
mail -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::allow_tables_to_appear_in_same_query!(
accounts,
amounts,
books,
categories,
tags,
transaction_tag_rels,
transactions,
users,
);
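Because the generated schema declares no joinable! associations, cross-table queries against these tables need an explicit ON clause; a hypothetical sketch (the query and connection handling are not from this repository):

use diesel::prelude::*;

use crate::model::schema::{categories, transactions};

// Report query: a user's transactions with their category names, skipping soft-deleted rows.
fn transactions_with_category(
    conn: &mut PgConnection,
    uid: i64,
) -> QueryResult<Vec<(i64, String, String)>> {
    transactions::table
        .inner_join(categories::table.on(categories::id.eq(transactions::category_id)))
        .filter(transactions::uid.eq(uid))
        .filter(transactions::is_delete.eq(false))
        .select((transactions::id, transactions::description, categories::name))
        .load(conn)
}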

View File

@@ -1,4 +0,0 @@
// use crate::model::db::prelude::Book;
// pub fn get_book_by_id(id:i64, uid:i64)->Option<Book> {
//
// }

View File

@@ -1 +0,0 @@
mod book;

109
src/schema.rs Normal file
View File

@@ -0,0 +1,109 @@
// @generated automatically by Diesel CLI.
diesel::table! {
accounts (id) {
id -> Int8,
uid -> Int8,
name -> Text,
#[sql_name = "type"]
type_ -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
amounts (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
value -> Int8,
expo -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
books (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
categories (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
tags (id) {
id -> Int8,
uid -> Int8,
name -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
transaction_tag_rels (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
tag_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
transactions (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
description -> Text,
category_id -> Int8,
is_delete -> Bool,
time -> Timestamptz,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::table! {
users (id) {
id -> Int8,
username -> Text,
password -> Text,
mail -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
diesel::allow_tables_to_appear_in_same_query!(
accounts,
amounts,
books,
categories,
tags,
transaction_tag_rels,
transactions,
users,
);

View File

@@ -1 +1 @@
-pub mod pass;
+pub mod req;

View File

@@ -1,16 +0,0 @@
use std::error::Error;
use pbkdf2::{
password_hash::{
rand_core::OsRng,
PasswordHash,SaltString,
},
Pbkdf2,
};
use pbkdf2::password_hash::PasswordHasher;
pub fn get_pbkdf2_from_psw(password:String) -> Result<String, pbkdf2::password_hash::Error> {
let salt = SaltString::generate(&mut OsRng);
let password_hash = Pbkdf2.hash_password(password.as_bytes(), &salt)?.to_string();
println!("{}",password_hash);
return Ok(password_hash)
}
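The removed helper only produces a hash; the matching verification step would look roughly like this, using the same pbkdf2 crate (this function is a sketch, not code from the repository):

use pbkdf2::{
    password_hash::{PasswordHash, PasswordVerifier},
    Pbkdf2,
};

// Check a login attempt against a stored PHC-format hash such as the one
// produced by get_pbkdf2_from_psw above.
pub fn verify_password(password: &str, stored_hash: &str) -> bool {
    match PasswordHash::new(stored_hash) {
        Ok(parsed) => Pbkdf2
            .verify_password(password.as_bytes(), &parsed)
            .is_ok(),
        Err(_) => false,
    }
}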

14
src/util/req.rs Normal file
View File

@@ -0,0 +1,14 @@
use axum::http::StatusCode;
use serde::Serialize;
#[derive(Serialize)]
pub struct CommonResp {
pub code: i64,
}
pub fn internal_error<E>(err: E) -> (StatusCode, String)
where
E: std::error::Error,
{
(StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
}