Compare commits

..

1 Commits

Author SHA1 Message Date
brian
27c94f4276 feat: init book handler 2025-06-08 20:42:24 +08:00
54 changed files with 3078 additions and 2983 deletions

View File

@@ -1,8 +0,0 @@
root = true
[*]
end_of_line = lf
insert_final_newline = true
[Makefile]
indent_style = tab

View File

@@ -1 +0,0 @@
DATABASE_URL=postgres://username:password@localhost/diesel_demo

1
.gitignore vendored
View File

@@ -3,3 +3,4 @@
.vscode
.DS_Store
.env
conf.toml

2827
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -4,25 +4,22 @@ version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[workspace]
members = [".", "entity", "migration"]
[dependencies]
async-trait = "0.1.81"
axum = {version = "0.8", features = ["macros"]}
axum-extra = { version = "0.10", features = ["typed-header"] }
chrono = {version = "0.4", features = ["serde"]}
deadpool-diesel = {version ="0.6.1", features = ["postgres"]}
diesel = { version = "2", features = ["postgres", "chrono"] }
dotenvy = "0.15"
jsonwebtoken = "9.3.0"
serde = { version = "1.0.202", features = ["derive"] }
serde_json = "1"
tokio = { version = "1.37.0", features = ["full"] }
tower = "0.4.13"
tower-http = {version= "0.5.2", features=["trace", "cors"] }
axum = { version = "0.8" }
axum-macros = "0.5"
sea-orm = { version = "1.1.12", features = [
"sqlx-postgres",
"runtime-tokio-rustls",
"macros",
"chrono",
] }
serde = { version = "1", features = ["derive"] }
tokio = { version = "1.0", features = ["full"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter"] }
once_cell = "1.19.0"
axum-macros = "0.4.1"
pbkdf2 = { version = "0.12", features = ["simple"] }
rand_core ={version = "0.6", features = ["std"]}
regex = {version = "1.10"}
dotenvy = "0.15.7"
toml = "0.8.22"
clap = { version = "4.0", features = ["derive"] }

View File

@@ -1,2 +0,0 @@
build-schema:
diesel print-schema > src/model/schema.rs

View File

@@ -1,9 +0,0 @@
# For documentation on how to configure this file,
# see https://diesel.rs/guides/configuring-diesel-cli
[print_schema]
file = "src/model/schema.rs"
custom_type_derives = ["diesel::query_builder::QueryId", "Clone"]
[migrations_directory]
dir = "./migrations"

22
migration/Cargo.toml Normal file
View File

@@ -0,0 +1,22 @@
[package]
name = "migration"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
async-std = { version = "1", features = ["attributes", "tokio1"] }
[dependencies.sea-orm-migration]
version = "1.1.12"
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio-rustls", # `ASYNC_RUNTIME` feature
"sqlx-postgres", # `DATABASE_DRIVER` feature
]

41
migration/README.md Normal file
View File

@@ -0,0 +1,41 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- generate MIGRATION_NAME
```
- Apply all pending migrations
```sh
cargo run
```
```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```

22
migration/src/lib.rs Normal file
View File

@@ -0,0 +1,22 @@
pub use sea_orm_migration::prelude::*;
mod m20250525_000001_create_ledger_table_category;
mod m20250525_000002_create_ledger_table_book;
mod m20250525_000003_create_ledger_table_tag;
mod m20250525_000004_create_ledger_table_account;
mod m20250525_000005_create_ledger_table_transaction;
/// Aggregates every schema migration for the ledger database.
pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    /// Returns all migrations in the order they must be applied.
    ///
    /// NOTE(review): the order here is load-bearing — sea-orm-migration runs
    /// entries front to back. Keep this list in sync with the `mod`
    /// declarations above and the on-disk migration file names.
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20250525_000001_create_ledger_table_category::Migration),
            Box::new(m20250525_000002_create_ledger_table_book::Migration),
            Box::new(m20250525_000003_create_ledger_table_tag::Migration),
            Box::new(m20250525_000004_create_ledger_table_account::Migration),
            Box::new(m20250525_000005_create_ledger_table_transaction::Migration),
        ]
    }
}

View File

@@ -0,0 +1,64 @@
use sea_orm_migration::prelude::*;
/// Creates the ledger `category` table.
pub struct Migration;

impl MigrationName for Migration {
    fn name(&self) -> &str {
        // Keep in sync with this migration's file name.
        "m20250525_000001_create_ledger_table_category"
    }
}

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Apply the migration: create the `category` table with its audit columns.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::create();
        stmt.table(Category::Table)
            .col(
                ColumnDef::new(Category::Id)
                    .big_integer()
                    .not_null()
                    .auto_increment()
                    .primary_key(),
            )
            .col(ColumnDef::new(Category::Name).string().not_null())
            .col(ColumnDef::new(Category::Uid).big_integer().not_null())
            .col(
                ColumnDef::new(Category::ParentId)
                    .big_integer()
                    .default(0i64)
                    .not_null(),
            )
            .col(
                ColumnDef::new(Category::IsDeleted)
                    .boolean()
                    .default(false)
                    .not_null(),
            )
            .col(
                ColumnDef::new(Category::CreatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            )
            .col(
                ColumnDef::new(Category::UpdatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            );
        manager.create_table(stmt).await
    }

    /// Roll back the migration: drop the `category` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::drop();
        stmt.table(Category::Table);
        manager.drop_table(stmt).await
    }
}

/// Identifier set for the `category` table and its columns.
#[derive(Iden)]
pub enum Category {
    Table,
    Id,
    Name,
    Uid,
    ParentId,
    IsDeleted,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,63 @@
use sea_orm_migration::prelude::*;
/// Creates the ledger `book` table.
pub struct Migration;

impl MigrationName for Migration {
    fn name(&self) -> &str {
        // Keep in sync with this migration's file name.
        "m20250525_000002_create_ledger_table_book"
    }
}

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Apply the migration: create the `book` table with its audit columns.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::create();
        stmt.table(Book::Table)
            .col(
                ColumnDef::new(Book::Id)
                    .big_integer()
                    .not_null()
                    .auto_increment()
                    .primary_key(),
            )
            .col(ColumnDef::new(Book::Name).string().not_null())
            .col(ColumnDef::new(Book::Uid).big_integer().not_null())
            .col(
                ColumnDef::new(Book::IsDeleted)
                    .boolean()
                    .default(false)
                    .not_null(),
            )
            .col(
                ColumnDef::new(Book::CreatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            )
            .col(
                ColumnDef::new(Book::UpdatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            );
        manager.create_table(stmt).await
    }

    /// Roll back the migration: drop the `book` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::drop();
        stmt.table(Book::Table);
        manager.drop_table(stmt).await
    }
}

/// Identifier set for the `book` table and its columns.
#[derive(Iden)]
pub enum Book {
    Table,
    Id,
    Name,
    Uid,
    IsDeleted,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,62 @@
use sea_orm_migration::prelude::*;
/// Creates the ledger `tag` table.
pub struct Migration;

impl MigrationName for Migration {
    /// Migration identifier recorded by sea-orm-migration's bookkeeping.
    ///
    /// FIX: this previously returned
    /// "m20250525_000001_create_ledger_table_tag", which did not match the
    /// module/file name `m20250525_000003_create_ledger_table_tag` registered
    /// in lib.rs (the inline comment itself said the name must match the file
    /// name). A mismatched name corrupts migration status tracking.
    fn name(&self) -> &str {
        "m20250525_000003_create_ledger_table_tag" // Make sure this matches with the file name
    }
}

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Apply the migration: create the `tag` table with its audit columns.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Tag::Table)
                    .col(
                        ColumnDef::new(Tag::Id)
                            .big_integer()
                            .not_null()
                            .auto_increment()
                            .primary_key(),
                    )
                    .col(ColumnDef::new(Tag::Name).string().not_null())
                    .col(ColumnDef::new(Tag::Uid).big_integer().not_null())
                    .col(ColumnDef::new(Tag::IsDeleted).boolean().default(false).not_null())
                    .col(
                        ColumnDef::new(Tag::CreatedAt)
                            .date_time()
                            .default(Expr::current_timestamp())
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Tag::UpdatedAt)
                            .date_time()
                            .default(Expr::current_timestamp())
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await
    }

    /// Roll back the migration: drop the `tag` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Tag::Table).to_owned())
            .await
    }
}

/// Identifier set for the `tag` table and its columns.
#[derive(Iden)]
pub enum Tag {
    Table,
    Id,
    Name,
    Uid,
    IsDeleted,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,64 @@
use sea_orm_migration::prelude::*;
/// Creates the ledger `account` table.
pub struct Migration;

impl MigrationName for Migration {
    fn name(&self) -> &str {
        // Keep in sync with this migration's file name.
        "m20250525_000004_create_ledger_table_account"
    }
}

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Apply the migration: create the `account` table with its audit columns.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::create();
        stmt.table(Account::Table)
            .col(
                ColumnDef::new(Account::Id)
                    .big_integer()
                    .not_null()
                    .auto_increment()
                    .primary_key(),
            )
            .col(ColumnDef::new(Account::Name).string().not_null())
            .col(ColumnDef::new(Account::Type).integer().not_null())
            .col(ColumnDef::new(Account::Uid).big_integer().not_null())
            .col(
                ColumnDef::new(Account::IsDeleted)
                    .boolean()
                    .default(false)
                    .not_null(),
            )
            .col(
                ColumnDef::new(Account::CreatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            )
            .col(
                ColumnDef::new(Account::UpdatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            );
        manager.create_table(stmt).await
    }

    /// Roll back the migration: drop the `account` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::drop();
        stmt.table(Account::Table);
        manager.drop_table(stmt).await
    }
}

/// Identifier set for the `account` table and its columns.
#[derive(Iden)]
pub enum Account {
    Table,
    Id,
    Name,
    Uid,
    Type,
    IsDeleted,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,84 @@
use sea_orm_migration::prelude::*;
/// Creates the ledger `transaction` table.
pub struct Migration;

impl MigrationName for Migration {
    fn name(&self) -> &str {
        // Keep in sync with this migration's file name.
        "m20250525_000005_create_ledger_table_transaction"
    }
}

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Apply the migration: create the `transaction` table.
    ///
    /// `TransactionTime` is stored with a time zone (business timestamp),
    /// while the `CreatedAt`/`UpdatedAt` audit columns are plain date-times.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::create();
        stmt.table(Transaction::Table)
            .col(
                ColumnDef::new(Transaction::Id)
                    .big_integer()
                    .not_null()
                    .auto_increment()
                    .primary_key(),
            )
            .col(ColumnDef::new(Transaction::Uid).big_integer().not_null())
            .col(ColumnDef::new(Transaction::Type).integer().not_null())
            .col(ColumnDef::new(Transaction::BookId).big_integer().not_null())
            .col(
                ColumnDef::new(Transaction::CategoryId)
                    .big_integer()
                    .not_null(),
            )
            .col(ColumnDef::new(Transaction::Description).string().not_null())
            .col(
                ColumnDef::new(Transaction::TransactionTime)
                    .timestamp_with_time_zone()
                    .default(Expr::current_timestamp())
                    .not_null(),
            )
            .col(
                ColumnDef::new(Transaction::IsDeleted)
                    .boolean()
                    .default(false)
                    .not_null(),
            )
            .col(
                ColumnDef::new(Transaction::CreatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            )
            .col(
                ColumnDef::new(Transaction::UpdatedAt)
                    .date_time()
                    .default(Expr::current_timestamp())
                    .not_null(),
            );
        manager.create_table(stmt).await
    }

    /// Roll back the migration: drop the `transaction` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let mut stmt = Table::drop();
        stmt.table(Transaction::Table);
        manager.drop_table(stmt).await
    }
}

/// Identifier set for the `transaction` table and its columns.
#[derive(Iden)]
pub enum Transaction {
    Table,
    Id,
    Uid,
    Type,
    BookId,
    CategoryId,
    Description,
    TransactionTime,
    IsDeleted,
    CreatedAt,
    UpdatedAt,
}

6
migration/src/main.rs Normal file
View File

@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
/// Entry point for the migration CLI (`cargo run -- up|down|fresh|status|...`).
#[async_std::main]
async fn main() {
    // Delegates argument parsing and execution to sea-orm-migration's CLI,
    // driving the `Migrator` registry defined in this crate's lib.rs.
    cli::run_cli(migration::Migrator).await;
}

View File

@@ -1,6 +0,0 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass);
DROP FUNCTION IF EXISTS diesel_set_updated_at();

View File

@@ -1,36 +0,0 @@
-- This file was automatically created by Diesel to setup helper functions
-- and other internal bookkeeping. This file is safe to edit, any future
-- changes will be added to existing projects as new migrations.
-- Sets up a trigger for the given table to automatically set a column called
-- `updated_at` whenever the row is modified (unless `updated_at` was included
-- in the modified columns)
--
-- # Example
--
-- ```sql
-- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW());
--
-- SELECT diesel_manage_updated_at('users');
-- ```
CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$
BEGIN
EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s
FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl);
END;
$$ LANGUAGE plpgsql;
CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$
BEGIN
IF (
NEW IS DISTINCT FROM OLD AND
NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at
) THEN
NEW.updated_at := current_timestamp;
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;

View File

@@ -1,12 +0,0 @@
-- This file should undo anything in `up.sql`
DROP TABLE IF EXISTS "categories";
DROP TABLE IF EXISTS "tags";
DROP TABLE IF EXISTS "books";
DROP TABLE IF EXISTS "transactions";
DROP TABLE IF EXISTS "transaction_tag_rels";
DROP TABLE IF EXISTS "accounts";
DROP TABLE IF EXISTS "amounts";
DROP TABLE IF EXISTS "users";
DROP TABLE IF EXISTS "operations";
DROP TABLE IF EXISTS "operation_snapshots";

View File

@@ -1,112 +0,0 @@
-- Your SQL goes here
CREATE TABLE "categories" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"book_id" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"level" INT NOT NULL DEFAULT 0,
"parent_category_id" BIGINT NOT NULL DEFAULT 0,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "tags" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"book_id" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"level" INT NOT NULL DEFAULT 0,
"parent_tag_id" BIGINT NOT NULL DEFAULT 0,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "books" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "transactions" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"book_id" BIGINT NOT NULL,
"description" TEXT NOT NULL,
"category_id" BIGINT NOT NULL,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"time" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT current_timestamp,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "transaction_tag_rels" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"transaction_id" BIGINT NOT NULL,
"tag_id" BIGINT NOT NULL,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "accounts" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"name" TEXT NOT NULL,
"account_type" BIGINT NOT NULL DEFAULT 0,
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "amounts" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"account_id" BIGINT NOT NULL,
"transaction_id" BIGINT NOT NULL,
"value" BIGINT NOT NULL DEFAULT 0,
"expo" BIGINT NOT NULL DEFAULT 5,
"currency" TEXT NOT NULL DEFAULT '',
"op_id" BIGINT NOT NULL DEFAULT 0,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "users" (
"id" BIGSERIAL PRIMARY KEY,
"username" TEXT NOT NULL UNIQUE,
"password" TEXT NOT NULL,
"mail" TEXT NOT NULL,
"is_delete" BOOLEAN NOT NULL DEFAULT FALSE,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "operations" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"entity_type" BIGINT NOT NULL,
"entity_id" BIGINT NOT NULL,
"action" BIGINT NOT NULL,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp,
"update_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);
CREATE TABLE "operation_snapshots" (
"id" BIGSERIAL PRIMARY KEY,
"uid" BIGINT NOT NULL,
"max_op_id" BIGINT NOT NULL,
"create_at" TIMESTAMP NOT NULL DEFAULT current_timestamp
);

48
src/api/book.rs Normal file
View File

@@ -0,0 +1,48 @@
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use sea_orm::ColumnTrait;
use sea_orm::{entity::*, query::*,};
use crate::model::db::prelude::Book;
use crate::model::db::book::Model as BookModel;
use crate::model::db::book::Column as BookColumn;
use crate::model::http_body::book;
use crate::AppState;
use crate::model::http_body::book::BookItem;
/// Builds the router subtree for the book endpoints; the caller nests it
/// under its path prefix (e.g. `/book`).
pub fn get_nest_handlers() -> Router<crate::AppState> {
    Router::new()
        .route("/", get(get_all_books_handler))
        // .route("/{id}", post(update_book).get(get_book))
}
// handlers
//
/// GET `/` — lists the books belonging to the current user as slim
/// `BookItem` payloads (id + name).
///
/// Errors are mapped to `500 Internal Server Error` with the database
/// error text as the body.
///
/// TODO(review): `uid` is hard-coded to 1; wire this up to the auth
/// middleware (`Claims`) so the real caller's id is used.
/// NOTE(review): no `is_deleted` filter is applied, so soft-deleted books
/// are also returned — confirm whether that is intended.
#[debug_handler]
async fn get_all_books_handler(
    state: State<AppState>,
) -> Result<Json<Vec<book::BookItem>>, (StatusCode, String)> {
    let uid: i64 = 1;

    let all_books = Book::find()
        .filter(BookColumn::Uid.eq(uid))
        .all(&state.conn)
        .await
        .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;

    // Map the DB models to the response shape in one pass instead of
    // pushing into a mutable Vec in a loop.
    let books: Vec<BookItem> = all_books
        .into_iter()
        .map(|b| BookItem {
            id: b.id,
            name: b.name,
        })
        .collect();

    Ok(Json(books))
}

1
src/api/mod.rs Normal file
View File

@@ -0,0 +1 @@
pub mod book;

View File

@@ -1,150 +0,0 @@
// use std::sync::Arc;
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::prelude::*;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::model::db_model;
use crate::model::schema;
use crate::util;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
#[derive(Deserialize)]
pub struct CreateAccountRequest {
name: String,
account_type: i64,
}
#[derive(Serialize)]
pub struct CreateAccountResponse {
id: i64,
name: String,
account_type: i64,
}
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/", post(create_account).get(get_all_accounts))
.route("/{id}", post(update_account).get(get_account))
}
#[debug_handler]
pub async fn create_account(
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateAccountRequest>,
) -> Result<Json<db_model::Account>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let new_account = db_model::AccountForm {
name: payload.name,
account_type: payload.account_type,
uid: uid,
};
let res = conn
.interact(move |conn| {
diesel::insert_into(schema::accounts::table)
.values(&new_account)
.returning(db_model::Account::as_returning())
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn update_account(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateAccountRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let now = Utc::now().naive_utc();
let res = conn
.interact(move |conn| {
diesel::update(schema::accounts::table)
.filter(schema::accounts::id.eq(id))
.filter(schema::accounts::uid.eq(uid))
.set((
schema::accounts::name.eq(payload.name),
schema::accounts::account_type.eq(payload.account_type),
schema::accounts::update_at.eq(now),
))
.execute(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
let resp = util::req::CommonResp { code: 0 };
Ok(Json(resp))
}
pub async fn get_account(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<db_model::Account>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::accounts::table
.filter(schema::accounts::id.eq(id))
.filter(schema::accounts::uid.eq(uid))
.select(db_model::Account::as_select())
.limit(1)
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn get_all_accounts(
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<Vec<db_model::Account>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::accounts::table
.filter(schema::accounts::uid.eq(uid))
.select(db_model::Account::as_select())
.load(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}

View File

@@ -1,145 +0,0 @@
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::prelude::*;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::model::db_model;
use crate::model::schema;
use crate::util;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
#[derive(Deserialize)]
pub struct CreateBookRequest {
name: String,
}
#[derive(Serialize)]
pub struct CreateBookResponse {
id: i64,
name: String,
}
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/", post(create_book).get(get_all_books))
.route("/{id}", post(update_book).get(get_book))
}
#[debug_handler]
pub async fn create_book(
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateBookRequest>,
) -> Result<Json<db_model::Book>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let new_book = db_model::BookForm {
name: payload.name,
uid,
};
let res = conn
.interact(move |conn| {
diesel::insert_into(schema::books::table)
.values(&new_book)
.returning(db_model::Book::as_returning())
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn update_book(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateBookRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let now = Utc::now().naive_utc();
let res = conn
.interact(move |conn| {
diesel::update(schema::books::table)
.filter(schema::books::id.eq(id))
.filter(schema::books::uid.eq(uid))
.set((
schema::books::name.eq(payload.name),
schema::books::update_at.eq(now),
))
.execute(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
let resp = util::req::CommonResp { code: 0 };
Ok(Json(resp))
}
pub async fn get_book(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<db_model::Book>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::books::table
.filter(schema::books::id.eq(id))
.filter(schema::books::uid.eq(uid))
.select(db_model::Book::as_select())
.limit(1)
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn get_all_books(
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<Vec<db_model::Book>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::books::table
.filter(schema::books::uid.eq(uid))
.select(db_model::Book::as_select())
.load(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}

View File

@@ -1,267 +0,0 @@
use std::cell::RefCell;
use std::rc::Rc;
use std::sync::Arc;
// use std::sync::Arc;
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::prelude::*;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::model::db_model;
use crate::model::schema;
use crate::model::schema::categories::parent_category_id;
use crate::util;
// use crate::model::schema::categories::dsl::categories;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
use crate::model::db_model::Category;
use crate::util::operation::{
EntityType, ENTITY_CATEGORY,
ActionType, ACTION_CREATE, ACTION_UPDATE, ACTION_DELETE,
};
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/", post(create_category).get(get_all_categories))
.route("/{id}", post(update_category).get(get_category))
}
#[derive(Deserialize)]
pub struct CreateCategoryRequest {
name: String,
level: String,
parent_category_id: String,
book_id: String,
}
#[derive(Serialize)]
pub struct CreateCategoryResponse {
id: i64,
}
#[debug_handler]
pub async fn create_category(
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateCategoryRequest>,
) -> Result<Json<CreateCategoryResponse>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let level: i32 = match payload.level.parse() {
Ok(level) => level,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid level".to_string(),
))
}
};
let parent_cid: i64 = match payload.parent_category_id.parse(){
Ok(id) => id,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid parent_category_id".to_string(),
))
}
};
let book_id: i64 = match payload.book_id.parse() {
Ok(id) => id,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid book_id".to_string(),
))
}
};
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let new_category = db_model::CategoryForm {
name: payload.name,
uid: uid,
level: level,
parent_category_id: parent_cid,
book_id: book_id,
};
let new_operation = db_model::CreateOperation{
uid: uid,
entity_type: ENTITY_CATEGORY,
entity_id: 0,
action: ACTION_CREATE,
};
let mut create_response = CreateCategoryResponse{
id: 0,
};
// Check if book exists under current user
let book_exists = conn
.interact(move |conn| {
schema::books::table
.select(diesel::dsl::exists(db_model::Category.as_select().filter(schema::books::id.eq(book_id))))
// .filter(schema::books::uid.eq(uid))(schema::books::id.eq(book_id))))
.get_result::<bool>(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
if !book_exists {
return Err((StatusCode::NOT_FOUND, "Book not found for the user".to_string()));
}
let cuid = uid;
let create_result = conn
.interact(move |conn| {
conn.transaction(|conn| {
let category = diesel::insert_into(schema::categories::table)
.values(&new_category)
.returning(db_model::Category::as_returning())
.get_result(conn)?;
let operation = diesel::insert_into(schema::operations::table)
.values(&new_operation)
.returning(db_model::Operation::as_returning())
.get_result(conn)?;
diesel::update(schema::categories::table)
.filter(schema::categories::id.eq(category.id))
.filter(schema::categories::uid.eq(cuid))
.set((schema::categories::op_id.eq(operation.id)))
.execute(conn)?;
diesel::update(schema::operations::table)
.filter(schema::operations::id.eq(operation.id))
.filter(schema::operations::uid.eq(cuid))
.set((schema::operations::entity_id.eq(category.id)))
.execute(conn)?;
diesel::result::QueryResult::Ok((category.id))
})
// diesel::insert_into(schema::categories::table)
// .values(&new_category)
// .returning(db_model::Category::as_returning())
// .get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
create_response.id = create_result;
Ok(Json(create_response))
}
pub async fn update_category(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<CreateCategoryRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let level: i32 = match payload.level.parse() {
Ok(level) => level,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid level".to_string(),
))
}
};
let parent_cid: i64 = match payload.parent_category_id.parse(){
Ok(id) => id,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid parent_category_id".to_string(),
))
}
};
let book_id: i64 = match payload.book_id.parse() {
Ok(id) => id,
Err(_) => {
return Err((
StatusCode::BAD_REQUEST,
"Invalid book_id".to_string(),
))
}
};
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let now = Utc::now().naive_utc();
let res = conn
.interact(move |conn| {
diesel::update(schema::categories::table)
.filter(schema::categories::id.eq(id))
.filter(schema::categories::uid.eq(uid))
.set((
schema::categories::name.eq(payload.name),
schema::categories::level.eq(level),
schema::categories::parent_category_id.eq(parent_cid),
schema::categories::update_at.eq(now),
))
.execute(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
// let ret = CreateCategoryResponse{id: res.id, name: res.name};
let resp = util::req::CommonResp { code: 0 };
Ok(Json(resp))
}
pub async fn get_category(
Path(id): Path<i64>,
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<db_model::Category>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::categories::table
.filter(schema::categories::id.eq(id))
.filter(schema::categories::uid.eq(uid))
.select(db_model::Category::as_select())
.limit(1)
.get_result(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
pub async fn get_all_categories(
State(app_state): State<crate::AppState>,
claims: Claims,
) -> Result<Json<Vec<db_model::Category>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let res = conn
.interact(move |conn| {
schema::categories::table
.filter(schema::categories::uid.eq(uid))
.select(db_model::Category::as_select())
.load(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}

View File

@@ -1,81 +0,0 @@
use diesel::prelude::*;
use serde::{Serialize, Deserialize};
use axum::{extract::{Path, State, Query}, http::StatusCode, Json, Router};
use axum::routing::get;
use diesel::dsl::max;
use crate::model::{db_model, schema};
use crate::middleware::auth::Claims;
use crate::model::db_model::Operation;
use crate::util;
/// One page of the caller's operation log, returned by `GET /operation`.
#[derive(Serialize)]
pub struct GetOperationsResponse {
    start: i64, // id of the first operation in this page (0 when the page is empty)
    end: i64, // id of the last operation in this page (0 when the page is empty)
    total: i64, // number of rows in THIS page, not the overall row count
    operations: Vec<db_model::Operation>,
}
/// Query parameters for `GET /operation`.
#[derive(Deserialize)]
pub struct GetOperationsParam {
    start: i64, // smallest operation id to return; negatives are treated as 0
    limit: i32, // page size; clamped into [0, MAX_QUERY_LIMIT] by the handler
}
/// Routes mounted under `/api/v1/operation` (see main.rs).
pub fn get_nest_handlers() -> Router<crate::AppState> {
    Router::new()
        .route("/", get(get_operations))
}
// GET /operation: list the caller's operations with id-based paging.
/// GET /: page through the caller's operation log.
///
/// `start` is the lowest operation id to include (negatives behave as 0)
/// and `limit` is clamped into `[0, MAX_QUERY_LIMIT]`. The response echoes
/// the first/last ids actually returned and the row count of this page.
pub async fn get_operations(
    query_param: Query<GetOperationsParam>,
    State(app_state): State<crate::AppState>,
    claims: Claims,
) -> Result<Json<GetOperationsResponse>, (StatusCode, String)> {
    let uid = claims.uid;
    let start = query_param.start.max(0);
    let limit = query_param.limit.clamp(0, crate::model::req::MAX_QUERY_LIMIT);
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let mut operations = conn
        .interact(move |conn| {
            schema::operations::table
                .filter(schema::operations::uid.eq(uid))
                .filter(schema::operations::id.ge(start))
                .limit(limit as i64)
                .select(Operation::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    // Guarantee ascending id order so first/last map to start/end.
    operations.sort_by_key(|op| op.id);
    let first_id = operations.first().map_or(0, |op| op.id);
    let last_id = operations.last().map_or(0, |op| op.id);
    Ok(Json(GetOperationsResponse {
        start: first_id,
        end: last_id,
        total: operations.len() as i64,
        operations,
    }))
}

View File

@@ -1,157 +0,0 @@
// use std::sync::Arc;
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::prelude::*;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::model::db_model;
use crate::model::schema;
use crate::util;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
/// Body for `POST /tag`; also reused as the update payload for `POST /tag/{id}`.
#[derive(Deserialize)]
pub struct CreateTagRequest {
    book_id: i64, // book the tag belongs to (not modified by update_tag)
    name: String,
    level: i32, // NOTE(review): looks like nesting depth in the tag tree — confirm
    parent_tag_id: i64,
}
/// Planned response shape for tag creation.
/// NOTE(review): currently unused — `create_tag` returns the full
/// `db_model::Tag` row instead. Consider deleting or adopting it.
#[derive(Serialize)]
pub struct CreateTagResponse {
    id: i64,
    name: String,
    book_id: i64,
    level: i32,
    parent_tag_id: i64,
}
/// Routes mounted under `/api/v1/tag` (see main.rs).
pub fn get_nest_handlers() -> Router<crate::AppState> {
    Router::new()
        .route("/", post(create_tag).get(get_all_tags))
        .route("/{id}", post(update_tag).get(get_tag))
}
/// POST /: insert a new tag owned by the authenticated user and echo the
/// freshly inserted row back as JSON.
#[debug_handler]
pub async fn create_tag(
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Json(payload): Json<CreateTagRequest>,
) -> Result<Json<db_model::Tag>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let form = db_model::TagForm {
        uid,
        book_id: payload.book_id,
        name: payload.name,
        level: payload.level,
        parent_tag_id: payload.parent_tag_id,
    };
    let inserted = conn
        .interact(move |conn| {
            diesel::insert_into(schema::tags::table)
                .values(&form)
                .returning(db_model::Tag::as_returning())
                .get_result(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(inserted))
}
/// POST /{id}: rewrite a tag's mutable fields (name, level, parent).
///
/// Scoped to rows owned by the caller; `book_id` from the payload is not
/// applied. Answers `CommonResp { code: 0 }` regardless of match count.
pub async fn update_tag(
    Path(id): Path<i64>,
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Json(payload): Json<CreateTagRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let now = Utc::now().naive_utc();
    conn.interact(move |conn| {
        diesel::update(schema::tags::table)
            .filter(schema::tags::id.eq(id))
            .filter(schema::tags::uid.eq(uid))
            .set((
                schema::tags::name.eq(payload.name),
                schema::tags::level.eq(payload.level),
                schema::tags::parent_tag_id.eq(payload.parent_tag_id),
                schema::tags::update_at.eq(now),
            ))
            .execute(conn)
    })
    .await
    .map_err(util::req::internal_error)?
    .map_err(util::req::internal_error)?;
    Ok(Json(util::req::CommonResp { code: 0 }))
}
/// GET /{id}: fetch one tag owned by the authenticated user.
pub async fn get_tag(
    Path(id): Path<i64>,
    State(app_state): State<crate::AppState>,
    claims: Claims,
) -> Result<Json<db_model::Tag>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let row = conn
        .interact(move |conn| {
            schema::tags::table
                .filter(schema::tags::id.eq(id))
                .filter(schema::tags::uid.eq(uid))
                .select(db_model::Tag::as_select())
                .limit(1)
                .get_result(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(row))
}
/// GET /: list every tag belonging to the authenticated user.
pub async fn get_all_tags(
    State(app_state): State<crate::AppState>,
    claims: Claims,
) -> Result<Json<Vec<db_model::Tag>>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let rows = conn
        .interact(move |conn| {
            schema::tags::table
                .filter(schema::tags::uid.eq(uid))
                .select(db_model::Tag::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(rows))
}

View File

@@ -1,503 +0,0 @@
use axum::extract::Query;
use axum::routing::{get, post};
use axum::{
extract::{Path, State},
http::StatusCode,
Json, Router,
};
use axum_macros::debug_handler;
use diesel::dsl::exists;
use diesel::prelude::*;
use std::fmt;
use std::i64::MAX;
use chrono::ParseResult;
// use diesel::update;
use serde::{Deserialize, Serialize};
// use serde_json::to_string;
use crate::middleware::auth;
use crate::middleware::auth::Claims;
use crate::model::{db_model,schema,req};
use crate::util;
use crate::util::req::CommonResp;
use chrono::prelude::*;
use tracing::info;
use crate::model::req::{GetAmountByTransactionRangeParams, GetAmountParams, MAX_QUERY_LIMIT};
const PAYMENT_STORE_EXPO: i64 = 5;
/// Body for creating (`POST /entry`) or updating a transaction entry.
#[derive(Deserialize)]
pub struct SubmitTransactionRequest {
    description: String,
    book_id: i64, // must reference a book owned by the caller
    category_id: i64, // must reference a category owned by the caller
    tag_ids: Vec<i64>, // may be empty; every id must belong to the caller
    time: String, // RFC 3339 "2020-04-12T22:10:57+02:00"
    amounts: Vec<SubmitTransactionAmountRequest>, // at least one entry required
}
/// One monetary leg of a transaction.
#[derive(Deserialize)]
pub struct SubmitTransactionAmountRequest {
    account_id: i64,
    payment: String, // decimal amount as text; parsed by util::math with PAYMENT_STORE_EXPO
    expo: i32, // NOTE(review): never read by create_transaction — the stored expo comes from parsing
    currency: String,
}
/// Intended response for `POST /transaction/entry`.
/// NOTE(review): not yet returned — the handler currently answers the plain
/// string "finish". Adopt or remove.
#[derive(Serialize)]
pub struct CreateTransactionResponse {
    pub id: i64,
    pub book_id: i64,
    pub description: String,
    pub category_id: i64,
    pub time: chrono::DateTime<Utc>,
    pub tag_ids: Vec<i64>,
    pub amount_ids: Vec<i64>,
}
/// Body for `POST /entry/batch_get`: explicit list of transaction ids to fetch.
#[derive(Deserialize)]
pub struct BatchGetTransactionRequest {
    pub transaction_ids: Vec<i64>, // must be non-empty
}
/// Body for `POST /amount/batch_get_by_transaction_id`.
#[derive(Deserialize)]
pub struct BatchGetTransactionAmountRequest {
    pub transaction_ids: Vec<i64>, // must be non-empty
}
/// Routes mounted under `/api/v1/transaction` (see main.rs).
pub fn get_nest_handlers() -> Router<crate::AppState> {
    Router::new()
        .route("/entry/batch_get", post(batch_get_transactions))
        .route(
            "/entry",
            post(create_transaction) // create new transaction entry with amount
                .get(get_all_transactions), // get all transactions with entry
        )
        .route("/entry/{id}", get(get_transaction)) // get transaction entry
        .route("/amount/by_transaction_id", get(get_amounts_by_tid))
        .route("/amount/batch_get_by_transaction_id", post(batch_get_amounts_by_tid))
        .route("/amount", get(get_all_amounts_by_tid_range))
    // .route("/entry/amount/:id", post(update_amount).get(get_amount)) // require query param tid=transaction_id
}
// Local error type for transaction-insert bookkeeping failures.
// NOTE(review): constructed nowhere in this file — possibly dead code.
#[derive(Debug, Clone)]
struct TransactionError;
impl fmt::Display for TransactionError {
    /// Human-readable form used when an insert yields no usable row.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        f.write_str("invalid transaction insert result")
    }
}
#[debug_handler]
pub async fn create_transaction(
State(app_state): State<crate::AppState>,
claims: Claims,
Json(payload): Json<SubmitTransactionRequest>,
) -> Result<String, (StatusCode, String)> {
// ) -> Result<Json<db_model::Transaction>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
// 1. check related ids
// 1.1 check book id
if payload.book_id <= 0 {
return Err((StatusCode::BAD_REQUEST, "invalid book id".to_string()));
}
let check_book = conn
.interact(move |conn| {
diesel::select(exists(
schema::books::table
.filter(schema::books::uid.eq(uid))
.filter(schema::books::id.eq(payload.book_id)),
))
.get_result::<bool>(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
println!("book valid: {}", check_book);
if !check_book {
return Err((StatusCode::BAD_REQUEST, "invalid book id".to_string()));
}
// 1.2 check category id
if payload.category_id <= 0 {
return Err((StatusCode::BAD_REQUEST, "invalid category id".to_string()));
}
let check_category = conn
.interact(move |conn| {
diesel::select(exists(
schema::categories::table
.filter(schema::categories::uid.eq(uid))
.filter(schema::categories::id.eq(payload.category_id)),
))
.get_result::<bool>(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
println!("category valid: {}", check_category);
if !check_category {
return Err((StatusCode::BAD_REQUEST, "invalid category id".to_string()));
}
// 1.3 check tag ids
let payload_tag_size = payload.tag_ids.len() as i64;
let mut check_tag = payload_tag_size == 0;
if !check_tag {
let check_tag_count = conn
.interact(move |conn| {
schema::tags::table
.filter(schema::tags::uid.eq(uid))
.filter(schema::tags::id.eq_any(payload.tag_ids))
.select(diesel::dsl::count(schema::tags::id))
.first(conn)
.map(|x: i64| x as i64)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
println!("check tag: {}", check_tag_count);
check_tag = check_tag_count == payload_tag_size;
}
println!("tag valid: {}", check_tag);
if !check_tag {
return Err((StatusCode::BAD_REQUEST, "invalid tag ids".to_string()));
}
// 1.4 check account
let mut check_amount = true;
let mut amounts: Vec<db_model::AmountForm> = Vec::new();
for amount_req in payload.amounts {
// Parse and check payment
let parse_payment_result =
util::math::parse_payment_to_value_expo(amount_req.payment.clone(), PAYMENT_STORE_EXPO);
let value: i64;
let expo: i64;
match parse_payment_result {
Ok((val, expon)) => {
value = val;
expo = expon;
}
Err(_) => {
break;
}
}
let amount = db_model::AmountForm {
uid: uid,
account_id: amount_req.account_id,
transaction_id: 0,
value: value,
expo: expo,
currency: amount_req.currency.clone(),
};
check_amount = check_amount && true;
amounts.push(amount);
}
if !check_amount || amounts.len() == 0 {
return Err((StatusCode::BAD_REQUEST, "invalid amount".to_string()));
}
// 2. build and insert into db
let datetime_tz = chrono::DateTime::parse_from_rfc3339(payload.time.as_str());
let datetime = match datetime_tz {
Ok(dt) => dt,
Err(_) => {
return Err((StatusCode::BAD_REQUEST, "invalid datetime, must be RFC 3339".to_string()))
}
};
let datetime_utc = datetime.with_timezone(&Utc);
let mut transaction_resp: CreateTransactionResponse;
let mut amount_ids: Vec<i64> = Vec::new();
let transaction = conn
.interact(move |conn| {
conn.transaction(|conn| {
let new_transaction = db_model::TransactionForm {
id: None,
uid: uid,
book_id: payload.book_id,
description: payload.description,
category_id: payload.category_id,
// time: payload
time: datetime_utc,
};
let inserted_transactions = diesel::insert_into(schema::transactions::table)
.values(&new_transaction)
.returning(db_model::Transaction::as_returning())
.get_results(conn);
let mut new_tr_vec: Vec<db_model::Transaction>;
match inserted_transactions {
Ok(tr) => new_tr_vec = tr,
Err(e) => {
return diesel::result::QueryResult::Err(e);
}
}
let mut new_tid = 0 as i64;
let new_tr = new_tr_vec.get(0);
match new_tr {
Some(tr) =>new_tid = tr.id,
None => new_tid = 0,
}
if new_tid <= 0 {
return diesel::result::QueryResult::Err(diesel::result::Error::NotFound);
}
for amount in amounts.iter_mut() {
amount.transaction_id = new_tid;
}
let inserted_amounts = diesel::insert_into(schema::amounts::table)
.values(&amounts)
.returning(db_model::Amount::as_returning())
.get_results(conn);
let new_amounts: Vec<db_model::Amount> = match inserted_amounts {
Ok(ams) => ams,
Err(_) => Vec::new(),
};
for am in new_amounts {
amount_ids.push(am.id)
};
diesel::result::QueryResult::Ok(())
})
})
.await
.map_err(util::req::internal_error)?;
// 3. build response data.
// Ok(Json(res))
Ok("finish".to_string())
}
/// Update an existing transaction's category, description, and timestamp.
///
/// `time` must be RFC 3339 and is stored normalized to UTC; only rows owned
/// by the caller are touched. NOTE(review): amounts and tags are not updated
/// here — confirm that is intentional. (This handler is not currently routed.)
pub async fn update_transaction(
    Path(id): Path<i64>,
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Json(payload): Json<SubmitTransactionRequest>,
) -> Result<Json<CommonResp>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let now = Utc::now().naive_utc();
    let datetime_utc = match chrono::DateTime::parse_from_rfc3339(payload.time.as_str()) {
        Ok(dt) => dt.with_timezone(&Utc),
        Err(_) => {
            return Err((
                StatusCode::BAD_REQUEST,
                "invalid datetime, must be RFC 3339".to_string(),
            ))
        }
    };
    conn.interact(move |conn| {
        diesel::update(schema::transactions::table)
            .filter(schema::transactions::id.eq(id))
            .filter(schema::transactions::uid.eq(uid))
            .set((
                schema::transactions::category_id.eq(payload.category_id),
                schema::transactions::description.eq(payload.description),
                schema::transactions::time.eq(datetime_utc),
                schema::transactions::update_at.eq(now),
            ))
            .execute(conn)
    })
    .await
    .map_err(util::req::internal_error)?
    .map_err(util::req::internal_error)?;
    Ok(Json(util::req::CommonResp { code: 0 }))
}
/// GET /entry/{id}: fetch one transaction owned by the authenticated user.
pub async fn get_transaction(
    Path(id): Path<i64>,
    State(app_state): State<crate::AppState>,
    claims: Claims,
) -> Result<Json<db_model::Transaction>, (StatusCode, String)> {
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let row = conn
        .interact(move |conn| {
            schema::transactions::table
                .filter(schema::transactions::id.eq(id))
                .filter(schema::transactions::uid.eq(uid))
                .select(db_model::Transaction::as_select())
                .limit(1)
                .get_result(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(row))
}
pub async fn get_all_transactions(
State(app_state): State<crate::AppState>,
claims: Claims,
Query(queryParams): Query<req::GetTransactionsQueryParams>,
) -> Result<Json<Vec<db_model::Transaction>>, (StatusCode, String)> {
let uid: i64 = claims.uid.clone();
let conn = app_state
.db
.get()
.await
.map_err(util::req::internal_error)?;
let offset = match queryParams.start {
None => {0}
Some(start) => if start > 0 {start-1} else {0}
};
let limit = match queryParams.limit {
None => {1 as i32}
Some(limit_num) => {
if(limit_num > req::MAX_QUERY_LIMIT) {
req::MAX_QUERY_LIMIT
} else if(limit_num < 1) {
1 as i32
} else {
limit_num
}
}
};
let res = conn
.interact(move |conn| {
schema::transactions::table.filter(schema::transactions::uid.eq(uid))
.offset(offset)
.limit(limit as i64)
.select(db_model::Transaction::as_select())
.load(conn)
})
.await
.map_err(util::req::internal_error)?
.map_err(util::req::internal_error)?;
Ok(Json(res))
}
/// POST /entry/batch_get: fetch a specific set of the caller's transactions
/// (soft-deleted rows excluded). Rejects an empty id list.
pub async fn batch_get_transactions(
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Json(payload): Json<BatchGetTransactionRequest>,
) -> Result<Json<Vec<db_model::Transaction>>, (StatusCode, String)> {
    let uid = claims.uid;
    if payload.transaction_ids.is_empty() {
        return Err((StatusCode::BAD_REQUEST, "no transaction_id list".to_string()));
    }
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let rows = conn
        .interact(move |conn| {
            schema::transactions::table
                .filter(schema::transactions::uid.eq(uid))
                .filter(schema::transactions::is_delete.eq(false))
                .filter(schema::transactions::id.eq_any(payload.transaction_ids))
                .select(db_model::Transaction::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(rows))
}
/// GET /amount/by_transaction_id: list the caller's amounts for a single
/// transaction. A missing/empty `transaction_id` defaults to 0, which
/// matches no stored rows.
pub async fn get_amounts_by_tid(
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Query(params): Query<GetAmountParams>,
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
    info!(params.transaction_id);
    let tid = params.transaction_id.unwrap_or(0);
    let uid = claims.uid;
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let rows = conn
        .interact(move |conn| {
            schema::amounts::table
                .filter(schema::amounts::uid.eq(uid))
                .filter(schema::amounts::transaction_id.eq(tid))
                .select(db_model::Amount::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(rows))
}
/// POST /amount/batch_get_by_transaction_id: list the caller's non-deleted
/// amounts for a set of transactions. Rejects an empty id list.
pub async fn batch_get_amounts_by_tid(
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Json(payload): Json<BatchGetTransactionAmountRequest>,
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
    let uid = claims.uid;
    if payload.transaction_ids.is_empty() {
        return Err((StatusCode::BAD_REQUEST, "no transaction_id list".to_string()));
    }
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let rows = conn
        .interact(move |conn| {
            schema::amounts::table
                .filter(schema::amounts::uid.eq(uid))
                .filter(schema::amounts::is_delete.eq(false))
                .filter(schema::amounts::transaction_id.eq_any(payload.transaction_ids))
                .select(db_model::Amount::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(rows))
}
/// GET /amount: list the caller's non-deleted amounts whose transaction id
/// falls in `[transaction_id_from, transaction_id_to]` (both inclusive).
///
/// Both range ends are required and must be positive with `from <= to`.
/// `limit` defaults to `MAX_QUERY_LIMIT` and — fix — is now also capped at
/// `MAX_QUERY_LIMIT`, matching every other list endpoint; previously a
/// client could request an unbounded page size.
pub async fn get_all_amounts_by_tid_range(
    State(app_state): State<crate::AppState>,
    claims: Claims,
    Query(params): Query<GetAmountByTransactionRangeParams>,
) -> Result<Json<Vec<db_model::Amount>>, (StatusCode, String)> {
    let uid: i64 = claims.uid;
    let tid_from = params.transaction_id_from.unwrap_or(-1);
    let tid_to = params.transaction_id_to.unwrap_or(-1);
    if uid <= 0 || tid_from <= 0 || tid_to <= 0 || tid_from > tid_to {
        return Err((StatusCode::BAD_REQUEST, "invalid values".to_string()));
    }
    // Missing/non-positive limits fall back to the cap; explicit requests are
    // clamped so MAX_QUERY_LIMIT is an actual upper bound.
    let limit: i64 = params
        .limit
        .filter(|&i| i > 0)
        .unwrap_or(MAX_QUERY_LIMIT as i64)
        .min(MAX_QUERY_LIMIT as i64);
    let conn = app_state.db.get().await.map_err(util::req::internal_error)?;
    let rows = conn
        .interact(move |conn| {
            schema::amounts::table
                .filter(schema::amounts::uid.eq(uid))
                .filter(schema::amounts::is_delete.eq(false))
                .filter(schema::amounts::transaction_id.ge(tid_from))
                .filter(schema::amounts::transaction_id.le(tid_to))
                .limit(limit)
                .select(db_model::Amount::as_select())
                .load(conn)
        })
        .await
        .map_err(util::req::internal_error)?
        .map_err(util::req::internal_error)?;
    Ok(Json(rows))
}

View File

@@ -1,107 +1,99 @@
use std::env;
use axum::{
// http::StatusCode,
// routing::{get, post},
// Json,
Router,
};
use axum::http::Method;
// use pbkdf2::password_hash::Error;
// use serde::{Deserialize, Serialize};
use tower::ServiceBuilder;
use tower_http::cors::{Any, CorsLayer};
use tower_http::trace::TraceLayer;
use tracing::info;
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
use crate::util::pass::get_pbkdf2_from_psw;
use axum::Router;
use clap::Parser;
use sea_orm::{Database, DatabaseConnection};
use serde::Deserialize;
// Project modules
mod ledger;
mod middleware;
mod api;
mod model;
mod util;
mod user;
// Passed App State
#[derive(Clone)]
pub struct AppState {
db: deadpool_diesel::postgres::Pool,
}
#[tokio::main]
async fn main() {
dotenvy::dotenv().unwrap();
tracing_subscriber::registry()
.with(tracing_subscriber::fmt::layer())
.init();
let args: Vec<String> = env::args().collect();
if args.len() <= 1 {
return;
}
// initialize db connection
let db_url = std::env::var("DATABASE_URL").unwrap();
let manager = deadpool_diesel::postgres::Manager::new(db_url, deadpool_diesel::Runtime::Tokio1);
let pool = deadpool_diesel::postgres::Pool::builder(manager)
.build()
.unwrap();
let shared_state = AppState { db: pool };
let cmd = args[1].clone();
match cmd.as_str() {
"add_user" => {
println!("adding user");
if args.len() <= 4 {
println!("insufficient arg number");
return;
}
let user = args[2].clone();
let psw = args[3].clone();
let mail = args[4].clone();
println!("adding user {}", user);
let hashed = get_pbkdf2_from_psw(psw);
let mut hash_psw = "".to_string();
match hashed {
Ok(val) => {
println!("get hash {}", val);
hash_psw=val;
}
Err(_) => {}
}
let res = user::dal::add_user(shared_state, user, hash_psw, mail)
.await;
return;
}
_ => {
println!("unknown command {}", cmd);
// initialize tracing
tracing_subscriber::fmt::init();
let cli = Cli::parse();
match cli.command {
Command::Serve { config_path } => {
if let Ok(config) = load_config(&config_path).await {
println!("Loaded config.");
println!("{},{}", config.service.host.clone(), config.service.port);
// Proceed with server initialization using `config`
start_server(&config).await;
} else {
eprintln!("Failed to load config from {}", config_path);
}
}
}
// Register routers
let cors_layer = CorsLayer::new()
.allow_methods([Method::GET, Method::POST])
.allow_origin(Any);
let global_layer = ServiceBuilder::new()
.layer(TraceLayer::new_for_http())
.layer(cors_layer);
let app = Router::new()
// V1 apis
.nest("/api/v1/category", ledger::category::get_nest_handlers())
.nest("/api/v1/tag", ledger::tag::get_nest_handlers())
.nest("/api/v1/book", ledger::book::get_nest_handlers())
.nest("/api/v1/account", ledger::account::get_nest_handlers())
.nest("/api/v1/transaction", ledger::transaction::get_nest_handlers())
.nest("/api/v1/user", user::handler::get_nest_handlers())
.nest("/api/v1/operation", ledger::operation::get_nest_handlers())
.with_state(shared_state)
.layer(global_layer);
let listener = tokio::net::TcpListener::bind("0.0.0.0:8987").await.unwrap();
info!("starting server on 0.0.0.0:8987");
axum::serve(listener, app).await.unwrap();
}
/// Shared axum state: the sea-orm connection handle cloned into every handler.
#[derive(Clone)]
struct AppState {
    conn: DatabaseConnection,
}
/// `[keys]` section of the TOML config file.
#[derive(Deserialize)]
struct Key {
    jwt: String, // NOTE(review): not read anywhere visible yet — presumably the JWT signing secret
    user: String, // NOTE(review): purpose unclear from this file — confirm
}
/// `[database]` section of the TOML config file.
#[derive(Deserialize)]
struct DatabaseConf {
    connection: String, // connection URL handed to sea_orm::Database::connect
}
/// `[service]` section of the TOML config file: the HTTP bind address.
#[derive(Deserialize)]
struct ServiceConf {
    host: String,
    port: u32,
}
/// Top-level shape of the TOML file passed via `--conf`.
#[derive(Deserialize)]
struct Config {
    service: ServiceConf,
    database: DatabaseConf,
    keys: Key,
}
/// Command-line entry point parsed by clap.
#[derive(clap::Parser)]
struct Cli {
    #[command(subcommand)]
    command: Command,
}
// CLI subcommands. (Plain comments, not ///, so clap help text is unchanged.)
#[derive(clap::Subcommand)]
enum Command {
    // `serve --conf <path>`: run the HTTP server with the given TOML config.
    Serve {
        #[arg(long = "conf")]
        config_path: String,
    },
}
/// Read and parse the TOML configuration file at `path`.
/// I/O and TOML-syntax failures bubble up as a boxed error.
async fn load_config(path: &str) -> Result<Config, Box<dyn std::error::Error>> {
    let raw = tokio::fs::read_to_string(path).await?;
    Ok(toml::from_str::<Config>(&raw)?)
}
// ====== Commands ======
// start http server
/// Connect to the database, assemble the router, and serve HTTP until the
/// process stops. Panics if the database or the listen address is unavailable.
async fn start_server(config: &Config) {
    let conn = Database::connect(&config.database.connection)
        .await
        .expect("Database connection failed.");
    let state = AppState { conn };
    // Route registration: only the book API is wired up so far.
    let app = Router::new()
        .nest("/api/v1/book", api::book::get_nest_handlers())
        .with_state(state);
    let server_url = format!("{}:{}", config.service.host, config.service.port);
    let listener = tokio::net::TcpListener::bind(&server_url).await.unwrap();
    axum::serve(listener, app).await.expect("Service panic happened");
}

View File

@@ -1,116 +0,0 @@
use axum::{
extract::FromRequestParts,
http::{
request::Parts,
StatusCode,
},
Json, RequestPartsExt,
response::{IntoResponse, Response},
};
use async_trait::async_trait;
use axum_extra::{
headers::{authorization::Bearer, Authorization},
TypedHeader,
};
use serde::{Deserialize, Serialize};
use serde_json::json;
use jsonwebtoken::{decode,encode, DecodingKey, EncodingKey, Header, Validation};
use std::fmt::Display;
use once_cell::sync::Lazy;
use crate::util;
/// JWT claims attached to every authenticated request (extracted via
/// the `FromRequestParts` impl below).
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    sub: String, // subject; rendered as "Email: {sub}" by the Display impl
    // company: String,
    exp: usize, // expiry timestamp, checked during decode
    pub uid: i64, // internal user id; handlers use it for row ownership filters
}
/// OAuth-style token response body: `{ access_token, token_type: "Bearer" }`.
#[derive(Debug, Serialize)]
struct AuthBody {
    access_token: String,
    token_type: String,
}
/// Credentials posted by a client when requesting a token.
/// NOTE(review): not used by any code visible in this file — confirm a login
/// handler still consumes it.
#[derive(Debug, Deserialize)]
struct AuthPayload {
    client_id: String,
    client_secret: String,
}
/// Authentication failures; mapped to HTTP responses by the IntoResponse impl.
#[derive(Debug)]
pub enum AuthError {
    WrongCredentials, // 401
    MissingCredentials, // 400
    TokenCreation, // 500
    InvalidToken, // 400; also used for a missing/unparsable Authorization header
}
// Process-wide JWT key pair, derived lazily from the JWT_SECRET env var.
// Panics on first use if the variable is unset.
static KEYS: Lazy<Keys> = Lazy::new(|| {
    let secret = std::env::var("JWT_SECRET").expect("JWT_SECRET must be set");
    Keys::new(secret.as_bytes())
});
/// Paired JWT encoding/decoding keys derived from one shared secret.
struct Keys {
    encoding: EncodingKey,
    decoding: DecodingKey,
}
impl Keys {
    /// Derive the encoding/decoding key pair from the shared HMAC secret.
    fn new(secret: &[u8]) -> Self {
        let encoding = EncodingKey::from_secret(secret);
        let decoding = DecodingKey::from_secret(secret);
        Self { encoding, decoding }
    }
}
impl Display for Claims {
    // `sub` is labelled as an email here — NOTE(review): confirm the token
    // issuer actually puts an email address in `sub`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Email: {}", self.sub)
    }
}
impl AuthBody {
fn new(access_token: String) -> Self {
Self {
access_token,
token_type: "Bearer".to_string(),
}
}
}
/// Axum extractor: any handler declaring a `Claims` parameter requires a
/// valid `Authorization: Bearer <jwt>` header; otherwise the request is
/// rejected with `AuthError::InvalidToken`.
impl<S> FromRequestParts<S> for Claims
where
    S: Send + Sync,
{
    type Rejection = AuthError;
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Extract the token from the authorization header
        let TypedHeader(Authorization(bearer)) = parts
            .extract::<TypedHeader<Authorization<Bearer>>>()
            .await
            .map_err(|_| AuthError::InvalidToken)?;
        // Decode and verify the token against the shared KEYS secret using
        // jsonwebtoken's default validation (includes expiry checking).
        let token_data = decode::<Claims>(bearer.token(), &KEYS.decoding, &Validation::default())
            .map_err(|_| AuthError::InvalidToken)?;
        Ok(token_data.claims)
    }
}
impl IntoResponse for AuthError {
fn into_response(self) -> Response {
let (status, error_message) = match self {
AuthError::WrongCredentials => (StatusCode::UNAUTHORIZED, "Wrong credentials"),
AuthError::MissingCredentials => (StatusCode::BAD_REQUEST, "Missing credentials"),
AuthError::TokenCreation => (StatusCode::INTERNAL_SERVER_ERROR, "Token creation error"),
AuthError::InvalidToken => (StatusCode::BAD_REQUEST, "Invalid token"),
};
let body = Json(json!({
"error": error_message,
}));
(status, body).into_response()
}
}

View File

@@ -1 +0,0 @@
pub mod auth;

22
src/model/db/account.rs Normal file
View File

@@ -0,0 +1,22 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "account")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub r#type: i32,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

21
src/model/db/book.rs Normal file
View File

@@ -0,0 +1,21 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "book")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

22
src/model/db/category.rs Normal file
View File

@@ -0,0 +1,22 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "category")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub parent_id: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,6 +1,9 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
pub mod prelude;
pub mod account;
pub mod book;
pub mod category;
pub mod tag;
pub mod book;
pub mod account;
pub mod transaction;
pub mod operation;

7
src/model/db/prelude.rs Normal file
View File

@@ -0,0 +1,7 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
pub use super::account::Entity as Account;
pub use super::book::Entity as Book;
pub use super::category::Entity as Category;
pub use super::tag::Entity as Tag;
pub use super::transaction::Entity as Transaction;

21
src/model/db/tag.rs Normal file
View File

@@ -0,0 +1,21 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "tag")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub name: String,
pub uid: i64,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,25 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 1.1.11
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
#[sea_orm(table_name = "transaction")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: i64,
pub uid: i64,
pub r#type: i32,
pub book_id: i64,
pub category_id: i64,
pub description: String,
pub transaction_time: DateTimeWithTimeZone,
pub is_deleted: bool,
pub created_at: DateTime,
pub updated_at: DateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -1,225 +0,0 @@
use crate::model::schema;
use diesel::prelude::*;
use chrono::{DateTime, Utc};
use crate::model::schema::operations::entity_id;
/// Row of the `categories` table. `id` serializes as a JSON string (see `mod string`).
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::categories)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Category {
    #[serde(with = "string")]
    pub id: i64,
    uid: i64, // owning user
    name: String,
    level: i32,
    parent_category_id: i64,
    book_id: i64,
    op_id: i64,
    #[serde(skip_serializing)]
    is_delete: bool, // soft-delete flag, hidden from API responses
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::categories)]
pub struct CategoryForm {
pub uid: i64,
pub name: String,
pub book_id: i64,
pub level: i32,
pub parent_category_id: i64,
}
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::tags)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Tag {
id: i64,
uid: i64,
book_id: i64,
name: String,
level: i32,
parent_tag_id: i64,
op_id: i64,
#[serde(skip_serializing)]
is_delete: bool,
create_at: chrono::NaiveDateTime,
update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::tags)]
pub struct TagForm {
pub uid: i64,
pub book_id: i64,
pub name: String,
pub level: i32,
pub parent_tag_id: i64,
}
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::books)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Book {
#[serde(with = "string")]
id: i64,
uid: i64,
name: String,
op_id: i64,
#[serde(skip_serializing)]
is_delete: bool,
create_at: chrono::NaiveDateTime,
update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::books)]
pub struct BookForm {
pub uid: i64,
pub name: String,
}
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::accounts)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Account {
id: i64,
uid: i64,
name: String,
account_type: i64,
op_id: i64,
#[serde(skip_serializing)]
is_delete: bool,
create_at: chrono::NaiveDateTime,
update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::accounts)]
pub struct AccountForm {
pub uid: i64,
pub name: String,
pub account_type: i64,
}
/// Row of the `transactions` table. The 64-bit id fields serialize as JSON
/// strings (see `mod string`).
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::transactions)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Transaction {
    #[serde(with = "string")]
    pub id: i64,
    uid: i64, // owning user
    #[serde(with = "string")]
    pub book_id: i64,
    pub description: String,
    #[serde(with = "string")]
    pub category_id: i64,
    pub time: chrono::DateTime<Utc>, // business timestamp, stored in UTC
    pub op_id: i64,
    #[serde(skip_serializing)]
    is_delete: bool, // soft-delete flag, hidden from API responses
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::transactions)]
pub struct TransactionForm {
pub id: Option<i64>,
pub uid: i64,
pub book_id: i64,
pub description: String,
pub category_id: i64,
pub time: chrono::DateTime<Utc>,
}
/// Row of the `amounts` table: one monetary leg of a transaction, stored as
/// an integer `value` with a decimal exponent `expo`.
#[derive(Queryable, Selectable, serde::Serialize, serde::Deserialize)]
#[diesel(table_name = schema::amounts)]
#[diesel(check_for_backend(diesel::pg::Pg))]
pub struct Amount {
    pub id: i64,
    uid: i64, // owning user
    account_id: i64,
    transaction_id: i64, // parent transaction
    value: i64, // scaled integer amount
    expo: i64, // decimal exponent paired with `value`
    currency: String,
    pub op_id: i64,
    #[serde(skip_serializing)]
    is_delete: bool, // soft-delete flag, hidden from API responses
    create_at: chrono::NaiveDateTime,
    update_at: chrono::NaiveDateTime,
}
#[derive(serde::Deserialize, Insertable)]
#[diesel(table_name = schema::amounts)]
pub struct AmountForm {
pub uid: i64,
pub transaction_id: i64,
pub account_id: i64,
pub value: i64,
pub expo: i64,
pub currency: String,
}
/// Row of the `users` table. `password` holds the PBKDF2 hash, never plaintext
/// (see the add_user flow in main.rs); the soft-delete flag is never serialized.
#[derive(Queryable, Selectable, serde::Serialize)]
#[diesel(table_name = schema::users)]
pub struct User {
    pub id: i64,
    pub username: String,
    pub password: String,
    pub mail: String,
    #[serde(skip_serializing)]
    pub is_delete: bool,
}
#[derive(Insertable)]
#[diesel(table_name = schema::users)]
pub struct UserForm {
pub username: String,
pub password: String,
pub mail: String,
}
/// Row of the `operations` audit log: one (entity_type, entity_id, action)
/// record per user mutation.
#[derive(Insertable,Queryable, Selectable, serde::Serialize)]
#[diesel(table_name = schema::operations)]
pub struct Operation {
    pub id: i64,
    pub uid: i64, // acting user
    pub entity_type: i64, // NOTE(review): enum semantics not visible here — confirm mapping
    pub entity_id: i64,
    pub action: i64, // NOTE(review): enum semantics not visible here — confirm mapping
    create_at: chrono::NaiveDateTime,
}
#[derive(Insertable)]
#[diesel(table_name = schema::operations)]
pub struct CreateOperation {
pub uid: i64,
pub entity_type: i64,
pub entity_id: i64,
pub action: i64,
}
// Serde adaptor used via `#[serde(with = "string")]` on i64 id fields so they
// serialize as JSON strings — presumably to survive JavaScript Number
// precision limits for 64-bit ids.
mod string {
    use std::fmt::Display;
    use std::str::FromStr;
    use serde::{de, Serializer, Deserialize, Deserializer};
    // Serialize any Display-able value as its string form.
    pub fn serialize<T, S>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
    where T: Display,
    S: Serializer
    {
        serializer.collect_str(value)
    }
    // Deserialize a string and parse it back with FromStr.
    pub fn deserialize<'de, T, D>(deserializer: D) -> Result<T, D::Error>
    where T: FromStr,
    T::Err: Display,
    D: Deserializer<'de>
    {
        String::deserialize(deserializer)?.parse().map_err(de::Error::custom)
    }
}

View File

@@ -0,0 +1,7 @@
use serde::Serialize;
/// API response item representing a single book (id + display name).
#[derive(Serialize)]
pub struct BookItem {
pub id: i64,
pub name: String,
}

View File

@@ -0,0 +1 @@
pub mod book;

View File

@@ -1,3 +1,2 @@
pub mod db_model;
pub mod schema;
pub mod req;
pub mod db;
pub mod http_body;

View File

@@ -1,42 +0,0 @@
use std::fmt;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer};
// Result ordering: ascending.
pub const QUERY_ORDER_INCREASE:i32 = 0;
// Result ordering: descending (inverted).
pub const QUERY_ORDER_INVERT:i32 = 1;
// Hard cap on page size for list queries.
pub const MAX_QUERY_LIMIT:i32 =1000;
/// Query-string parameters for fetching amounts of one transaction.
#[derive(Debug, Deserialize)]
pub struct GetAmountParams {
// Absent OR empty-string query value both deserialize to `None`.
#[serde(default, deserialize_with="empty_string_as_none")]
pub transaction_id: Option<i64>,
}
/// Query-string parameters for fetching amounts over a transaction-id range,
/// with an optional result limit.
#[derive(Debug, Deserialize)]
pub struct GetAmountByTransactionRangeParams {
// Inclusive/exclusive bounds not evident here — semantics decided by the handler.
pub transaction_id_from: Option<i64>,
pub transaction_id_to: Option<i64>,
pub limit: Option<i64>,
}
// Serde deserialization decorator to map empty Strings to None,
fn empty_string_as_none<'de, D, T>(de: D) -> Result<Option<T>, D::Error>
where
D: Deserializer<'de>,
T: FromStr,
T::Err: fmt::Display,
{
let opt = Option::<String>::deserialize(de)?;
match opt.as_deref() {
None | Some("") => Ok(None),
Some(s) => FromStr::from_str(s).map_err(de::Error::custom).map(Some),
}
}
/// Pagination parameters for listing transactions.
#[derive(Deserialize)]
pub struct GetTransactionsQueryParams {
// Starting cursor/offset — exact semantics decided by the handler.
pub start: Option<i64>,
// Page size; presumably capped by MAX_QUERY_LIMIT — confirm in the handler.
pub limit: Option<i32>,
}

View File

@@ -1,146 +0,0 @@
// @generated automatically by Diesel CLI.
// NOTE(review): the comments below are review annotations and will be lost
// if this file is regenerated with `diesel print-schema`.
// Common column pattern across tables: `op_id` (last operation touching the
// row — presumably `operations.id`; confirm), `is_delete` (soft delete),
// `create_at`/`update_at` timestamps.

// Ledger accounts per user.
diesel::table! {
accounts (id) {
id -> Int8,
uid -> Int8,
name -> Text,
account_type -> Int8,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Monetary legs of transactions (fixed-point value/expo + currency).
diesel::table! {
amounts (id) {
id -> Int8,
uid -> Int8,
account_id -> Int8,
transaction_id -> Int8,
value -> Int8,
expo -> Int8,
currency -> Text,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Account books owned by users.
diesel::table! {
books (id) {
id -> Int8,
uid -> Int8,
name -> Text,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Hierarchical transaction categories (level + parent id).
diesel::table! {
categories (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
name -> Text,
level -> Int4,
parent_category_id -> Int8,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Per-user checkpoint of the highest operation id already processed.
diesel::table! {
operation_snapshots (id) {
id -> Int8,
uid -> Int8,
max_op_id -> Int8,
create_at -> Timestamp,
}
}
// Audit/sync log of entity actions.
diesel::table! {
operations (id) {
id -> Int8,
uid -> Int8,
entity_type -> Int8,
entity_id -> Int8,
action -> Int8,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Hierarchical tags (level + parent id), scoped to a book.
diesel::table! {
tags (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
name -> Text,
level -> Int4,
parent_tag_id -> Int8,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Many-to-many link between transactions and tags.
diesel::table! {
transaction_tag_rels (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
tag_id -> Int8,
op_id -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Transactions; `time` is the only timezone-aware column.
diesel::table! {
transactions (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
description -> Text,
category_id -> Int8,
op_id -> Int8,
is_delete -> Bool,
time -> Timestamptz,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Application users.
diesel::table! {
users (id) {
id -> Int8,
username -> Text,
password -> Text,
mail -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Allow joins across all schema tables in a single query.
diesel::allow_tables_to_appear_in_same_query!(
accounts,
amounts,
books,
categories,
operation_snapshots,
operations,
tags,
transaction_tag_rels,
transactions,
users,
);

View File

@@ -1,136 +0,0 @@
// @generated automatically by Diesel CLI.
// NOTE(review): this appears to be an OLDER schema variant using a
// `version_v1`/`versions_v1` audit column/table where the other schema uses
// `op_id`/`operations` — confirm which is current. Comments below are review
// annotations and will be lost on regeneration.

// Ledger accounts per user.
diesel::table! {
accounts (id) {
id -> Int8,
uid -> Int8,
name -> Text,
account_type -> Int8,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Monetary legs of transactions (fixed-point value/expo + currency).
diesel::table! {
amounts (id) {
id -> Int8,
uid -> Int8,
account_id -> Int8,
transaction_id -> Int8,
value -> Int8,
expo -> Int8,
currency -> Text,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Account books owned by users.
diesel::table! {
books (id) {
id -> Int8,
uid -> Int8,
name -> Text,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Hierarchical transaction categories (level + parent id).
diesel::table! {
categories (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
name -> Text,
level -> Int4,
parent_category_id -> Int8,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Hierarchical tags (level + parent id), scoped to a book.
diesel::table! {
tags (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
name -> Text,
level -> Int4,
parent_tag_id -> Int8,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Many-to-many link between transactions and tags.
diesel::table! {
transaction_tag_rels (id) {
id -> Int8,
uid -> Int8,
transaction_id -> Int8,
tag_id -> Int8,
version_v1 -> Int8,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Transactions; `time` is the only timezone-aware column.
diesel::table! {
transactions (id) {
id -> Int8,
uid -> Int8,
book_id -> Int8,
description -> Text,
category_id -> Int8,
version_v1 -> Int8,
is_delete -> Bool,
time -> Timestamptz,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Application users.
diesel::table! {
users (id) {
id -> Int8,
username -> Text,
password -> Text,
mail -> Text,
is_delete -> Bool,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Audit/version log of entity actions (predecessor of `operations`).
diesel::table! {
versions_v1 (id) {
id -> Int8,
uid -> Int8,
entity_type -> Int8,
entity_id -> Int8,
action -> Int8,
create_at -> Timestamp,
update_at -> Timestamp,
}
}
// Allow joins across all schema tables in a single query.
diesel::allow_tables_to_appear_in_same_query!(
accounts,
amounts,
books,
categories,
tags,
transaction_tag_rels,
transactions,
users,
versions_v1,
);

View File

@@ -1,118 +0,0 @@
use diesel::prelude::*;
use crate::model::{db_model, schema};
use std::error::Error;
use std::fmt::Debug;
use pbkdf2::password_hash::{PasswordHash, PasswordVerifier};
use pbkdf2::Pbkdf2;
use serde_json::json;
/// Create a new `users` row with the given credentials.
///
/// Returns `Ok(())` both when the user was inserted AND when the username
/// already exists (the duplicate case is only logged) — callers cannot tell
/// the two apart. Every failure collapses into `Err(())` with no detail.
///
/// NOTE(review): `password` is stored exactly as passed in, while login
/// verification expects a PBKDF2 PHC hash — confirm that every caller hashes
/// the password (e.g. via the pbkdf2 helper) before calling this, otherwise
/// login will never succeed for users created here.
pub async fn add_user(app_state: crate::AppState, username: String, password: String, mail: String) -> Result<(), ()> {
let conn = app_state
.db
.get()
.await
.map_err(|_| {
println!("fail to get db connection");
()
})?;
let target_username = username.clone();
// 1. check if current username exists.
let res = conn.interact(
move |conn| {
schema::users::table
.filter(schema::users::username.eq(target_username.clone()))
.count()
.get_result::<i64>(conn)
})
.await
// Outer error: the interact task itself failed or panicked.
.map_err(|_res| {
()
})?
// Inner error: the diesel count query failed.
.map_err(|_res| {
()
})?;
println!("ret {}", res);
if res > 0 {
// Duplicate username is treated as success — only logged.
println!("user already exists.");
return Ok(());
}
let new_user_form = db_model::UserForm {
username: username.clone(),
password: password.clone(),
mail: mail.clone(),
};
// 2. adding user
let add_res = conn.interact(
move |conn| {
diesel::insert_into(schema::users::table)
.values(&new_user_form)
.returning(db_model::User::as_returning())
.get_result(conn)
})
.await
.map_err(|_e| {
()
})?
.map_err(|_e| {
()
})?;
let out = json!(add_res);
// NOTE(review): this logs the full inserted row, including the stored
// password field — consider redacting before shipping.
println!("new user {}", out.to_string());
Ok(())
}
/// Verify `password` against the stored PBKDF2 PHC hash for `username`.
///
/// Returns `true` only when exactly one matching user row exists and the
/// supplied password verifies against its stored hash. Any infrastructure
/// failure (pool, query, malformed hash) yields `false` rather than an error.
pub async fn check_user_psw(app_state: crate::AppState, username: String, password: String) -> bool {
    // Acquire a pooled connection; treat pool exhaustion as auth failure.
    let Ok(conn) = app_state.db.get().await else {
        println!("fail to get db connection");
        return false;
    };
    // 1. Load candidate row(s) for this username. Outer Err = interact task
    // failed/panicked; inner Err = diesel query failed.
    let query_username = username.clone();
    let Ok(Ok(users)) = conn
        .interact(|conn| {
            schema::users::table
                .filter(schema::users::username.eq(query_username))
                .select(db_model::User::as_select())
                .get_results(conn)
        })
        .await
    else {
        return false;
    };
    // Refuse missing or ambiguous usernames.
    if users.len() != 1 {
        return false;
    }
    let stored = match users.first() {
        Some(user) => user.password.clone(),
        None => String::new(),
    };
    // 2. Parse the stored PHC-format hash and verify.
    // SECURITY: never log credentials here — the previous version printed
    // the plaintext password and the stored hash to stdout.
    let Ok(hash) = PasswordHash::new(stored.as_str()) else {
        return false;
    };
    Pbkdf2.verify_password(password.as_bytes(), &hash).is_ok()
}

View File

@@ -1,29 +0,0 @@
use axum::{
extract::State, http::StatusCode, routing::post, Json, Router
};
use axum_macros::debug_handler;
use crate::middleware::auth::Claims;
use super::dal::check_user_psw;
/// Build the router for the auth endpoints; the caller nests it under its
/// own path prefix.
pub fn get_nest_handlers() -> Router<crate::AppState> {
Router::new()
.route("/login", post(login))
}
/// JSON body of the login request.
#[derive(serde::Deserialize)]
pub struct LoginCredentialRequest {
pub username: String,
// Plaintext password as submitted; hashing/verification happens server-side.
pub password: String,
}
#[debug_handler]
pub async fn login(
State(app_state): State<crate::AppState>,
Json(payload): Json<LoginCredentialRequest>,
) -> Result<(), (StatusCode, String)> {
let res = check_user_psw(app_state, payload.username.clone(), payload.password.clone()).await;
if !res {
return Err((StatusCode::UNAUTHORIZED, "invalid credentials".to_string()));
}
Ok(())
}

View File

@@ -1,2 +0,0 @@
pub mod dal;
pub mod handler;

View File

@@ -1,45 +0,0 @@
use regex::Regex;
/// Parse a decimal payment string (e.g. `"12.34"`) into a fixed-point
/// `(value, expo)` pair where the amount equals `value * 10^-expo`.
///
/// Accepts only a plain non-negative decimal with at most 10 integer digits
/// and 2..=6 fractional digits; anything else yields `Err(())`. Fractional
/// digits beyond `target_expo` are truncated.
pub fn parse_payment_to_value_expo(payment_str: String, target_expo: i64) -> Result<(i64, i64), ()> {
    // 1. Validate the overall shape. The pattern is anchored at BOTH ends:
    // the previous version lacked `^`, so inputs like "x1.23" slipped past
    // validation and panicked in the integer parse below, and "-1.23"
    // silently produced a negative amount.
    let re = Regex::new(r"^[1-9]{0,9}[0-9]\.[0-9]{2,6}$").unwrap();
    if !re.is_match(payment_str.as_str()) {
        return Err(());
    }
    // 2. Split into integer and fractional parts at the decimal point.
    let (int_part, decimal_part) = match payment_str.find('.') {
        Some(pos) => (&payment_str[..pos], &payment_str[pos + 1..]),
        None => (payment_str.as_str(), ""),
    };
    // 3. Right-pad the fraction with zeros to exactly `target_expo` digits,
    // truncating any excess precision (all chars are ASCII digits, so
    // truncate() cannot split a char).
    let mut dec_part_padding = format!("{:0<width$}", decimal_part, width = target_expo as usize);
    dec_part_padding.truncate(target_expo as usize);
    // 4. Concatenate and parse as a scaled integer. With <=10 integer digits
    // and a bounded expo this fits comfortably in i64; propagate any parse
    // failure instead of unwrapping.
    let num_str = format!("{}{}", int_part, dec_part_padding);
    let value = num_str.parse::<i64>().map_err(|_| ())?;
    Ok((value, target_expo))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_payment(){
// "1.345" padded to 6 fractional digits -> 1_345_000 * 10^-6.
let r1 = parse_payment_to_value_expo("1.345".to_string(), 6);
assert_eq!(r1, Ok((1345000, 6)));
// "0.01" padded to 6 fractional digits -> 10_000 * 10^-6.
let r2 = parse_payment_to_value_expo("0.01".to_string(), 6);
assert_eq!(r2, Ok((10000, 6)));
// 8 fractional digits exceed the 2..=6 allowed by the format -> rejected.
let r3 = parse_payment_to_value_expo("0.10000001".to_string(), 6);
assert_eq!(r3, Err(()));
}
}

View File

@@ -1,4 +0,0 @@
pub mod req;
pub mod pass;
pub mod math;
pub mod operation;

View File

@@ -1,16 +0,0 @@
// Discriminator for `operations.entity_type`: which table the logged row
// belongs to.
pub type EntityType = i64;
pub const ENTITY_CATEGORY: EntityType = 1;
pub const ENTITY_TAG: EntityType = 2;
pub const ENTITY_BOOK: EntityType = 3;
pub const ENTITY_ACCOUNT: EntityType = 4;
pub const ENTITY_TRANSACTION: EntityType = 5;
pub const ENTITY_AMOUNT: EntityType = 6;
// Discriminator for `operations.action`: what was done to the entity.
pub type ActionType = i64;
pub const ACTION_CREATE: ActionType = 1;
pub const ACTION_UPDATE: ActionType = 2;
pub const ACTION_DELETE: ActionType = 3;

View File

@@ -1,16 +0,0 @@
use std::error::Error;
use pbkdf2::{
password_hash::{
rand_core::OsRng,
PasswordHash,SaltString,
},
Pbkdf2,
};
use pbkdf2::password_hash::PasswordHasher;
/// Hash `password` with PBKDF2 and a freshly generated random salt,
/// returning the PHC-format hash string suitable for storage.
pub fn get_pbkdf2_from_psw(password:String) -> Result<String, pbkdf2::password_hash::Error> {
    let salt = SaltString::generate(&mut OsRng);
    // SECURITY: the hash is returned, never printed — the previous debug
    // println leaked password hashes to stdout/logs.
    Ok(Pbkdf2.hash_password(password.as_bytes(), &salt)?.to_string())
}

View File

@@ -1,14 +0,0 @@
use axum::http::StatusCode;
use serde::Serialize;
/// Minimal JSON response envelope carrying only an application status code.
#[derive(Serialize)]
pub struct CommonResp {
pub code: i64,
}
/// Map any error into the `(500, message)` tuple axum handlers return,
/// exposing the error's `Display` text as the body.
pub fn internal_error<E>(err: E) -> (StatusCode, String)
where
E: std::error::Error,
{
(StatusCode::INTERNAL_SERVER_ERROR, err.to_string())
}