Compare commits


1 Commit

e73d5d5f88 Attempt to migrate to sqlx 0.4.0-beta.1 (2020-10-28 12:25:36 -04:00)
There's still a ton of stuff broken. It's honestly too much to deal with right now so I'm abandoning it until it's released officially.
36 changed files with 2086 additions and 5974 deletions

.cargo/config.toml (new file, 15 lines)

@@ -0,0 +1,15 @@
# NOTE: For maximum performance, build using a nightly compiler
# If you are using rust stable, remove the "-Zshare-generics=y" below.
[target.x86_64-unknown-linux-gnu]
linker = "/usr/bin/clang"
rustflags = ["-Clink-arg=-fuse-ld=lld", "-Zshare-generics=y"]
[target.x86_64-apple-darwin]
rustflags = ["-Zshare-generics=y"]
# NOTE: you must manually install lld on windows. you can easily do this with the "scoop" package manager:
# `scoop install llvm`
[target.x86_64-pc-windows-msvc]
linker = "lld-link.exe"
rustflags = ["-Clinker=lld", "-Zshare-generics=y"]


@@ -1 +0,0 @@
target

.gitignore (2 lines changed)

@@ -1,6 +1,6 @@
/target
**/*.rs.bk
.env
-.env.docker
Session.vim
+src/db/refinery.toml
tags

Cargo.lock (generated, 802 lines changed)

File diff suppressed because it is too large


@@ -8,31 +8,25 @@ edition = "2018"
[dependencies]
anyhow = "1.0"
-bincode = "1.3"
chrono = { version = "0.4", features = ["serde"] }
dotenv = "0.15"
http-api-problem = { version = "0.17", features = ["with-warp"] }
hyper = "0.13"
-lazy_static = "1.4"
listenfd = "0.3"
-mime = "0.3"
-openssl-probe = "0.1"
tokio = { version = "0.2", features = ["macros", "rt-threaded", "sync"] }
-sqlx = { version = "0.4.1", default-features = false, features = [ "runtime-tokio-rustls", "macros", "postgres", "chrono", "uuid", "ipnetwork", "json", "migrate", "offline" ] }
-warp = { version = "0.2", features = ["compression", "tls"] }
+sqlx = { version = "0.4.0-beta.1", default-features = false, features = [ "runtime-tokio", "macros", "postgres", "chrono", "uuid", "ipnetwork", "json" ] }
+warp = { version = "0.2", features = ["compression"] }
+refinery = { version = "0.3.0", features = [ "tokio-postgres", "tokio" ] }
+barrel = { version = "0.6.5", features = [ "pg" ] }
+clap = "3.0.0-beta.1"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
uuid = { version = "0.8", features = ["serde", "v4"] }
ipnetwork = "0.17"
url = "2.1"
async-trait = "0.1"
-seahash = "4.0"
tracing = "0.1"
-tracing-appender = "0.1"
tracing-subscriber = "0.2"
tracing-futures = "0.2"
lru = "0.5"
http = "0.2"
-[profile.release]
-lto = true


@@ -1,24 +0,0 @@
# -*- mode: dockerfile -*-
# You can override this `--build-arg BASE_IMAGE=...` to use different
# version of Rust or OpenSSL.
ARG BASE_IMAGE=ekidd/rust-musl-builder:nightly-2020-10-08
# Our first FROM statement declares the build environment.
FROM ${BASE_IMAGE} AS builder
# Add our source code.
ADD --chown=rust:rust . ./
ENV SQLX_OFFLINE true
# Build our application.
RUN cargo build --release
# Now, we need to build our _real_ Docker container, copying in `using-sqlx`.
FROM alpine:latest
RUN apk --no-cache add ca-certificates
COPY --from=builder \
/home/rust/src/target/x86_64-unknown-linux-musl/release/bazaar_realm_api \
/usr/local/bin/
CMD /usr/local/bin/bazaar_realm_api

README.md (173 lines changed)

@@ -1,111 +1,45 @@
# BazaarRealmAPI
The API for the Bazaar Realm Skyrim mod which is responsible for storing and
serving data related to the mod to all users.
Right now, the types of data the API stores and the endpoints to access them
are (all prefixed under `/v1`, the API version):
-- `/owners`: Every player character that has registered with this API server.
+* `/owners`: Every player character that has registered with this API server.
Contains their unique api key. Owners own shops.
-- `/shops`: Metadata about each shop including name, description, and who owns
+* `/shops`: Metadata about each shop including name, description, and who owns
it.
-- `/interior_ref_lists`: Lists of in-game ObjectReferences that are in the
+* `/interior_ref_lists`: Lists of in-game ObjectReferences that are in the
interior of individual shops. When a user visits a shop, these references
are loaded into the cell.
-- `/merchandise_lists`: Lists of in-game Forms that are in the merchant chest
+* `/merchandise_lists`: Lists of in-game Forms that are in the merchant chest
of individual shops. When a user visits a shop, these forms are loaded
onto the shop's shelves and are purchasable.
-- `/transactions`: Allows posting a new buy or sell between an owner and a
-shop's merchandise.
Bazaar Realm was designed to allow users to change the API they are using the
mod under, if they wish. The API can run on a small server with minimal
-resources, which should be suitable for a group of friends to share
-with each other.
+resources, which should be suitable for a small group of friends to share
+shops with each other.
It uses the [`warp`](https://crates.io/crates/warp) web server framework and
[`sqlx`](https://crates.io/crates/sqlx) for database queries to a [PostgreSQL
database](https://www.postgresql.org).
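The README names the whole stack in one line: warp for routing and sqlx for talking to PostgreSQL. Below is a minimal, hypothetical sketch of how those two pieces fit together; the route, query, and pool settings are placeholders, not the project's actual `main.rs`.

```rust
// Hypothetical sketch only: a warp route backed by a sqlx PgPool.
use sqlx::postgres::PgPoolOptions;
use warp::Filter;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let pool = PgPoolOptions::new()
        .max_connections(5)
        .connect(&std::env::var("DATABASE_URL")?)
        .await?;

    // Hand a clone of the pool to each request handler.
    let db = warp::any().map(move || pool.clone());

    // Placeholder route; the real API exposes /v1/owners, /v1/shops, etc.
    let status = warp::path!("v1" / "status")
        .and(db)
        .and_then(|pool: sqlx::PgPool| async move {
            // A trivial query just to show the pool being used.
            let row: (i32,) = sqlx::query_as("SELECT 1")
                .fetch_one(&pool)
                .await
                .map_err(|_| warp::reject())?;
            Ok::<_, warp::Rejection>(format!("ok: {}", row.0))
        });

    warp::serve(status).run(([127, 0, 0, 1], 3030)).await;
    Ok(())
}
```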
-The API was designed with performance as a high priority. When it serves a
-response, it also caches that response so future queries for the same data
-can be returned in less than 1ms. To reduce data sent over the network,
-clients can use the
-[ETag](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/ETag)
-headers to indicate to the server what version of the data they have cached
-so the server can send a 304 response with no data if the resource hasn't
-changed since the client last requested. Using the
-[Accept](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept)
-header, clients can also opt for the more space-efficient and faster to
-deserialize [bincode](https://github.com/servo/bincode) format instead of the
-JSON default.
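The removed paragraph describes the caching contract from the client's side: send the cached ETag in `If-None-Match` to get a 304 when nothing changed, and use `Accept` to opt into bincode. A rough sketch of such a request built with the `http` crate; the endpoint and header values are illustrative, not prescribed by the API.

```rust
// Illustrative only: a conditional, bincode-preferring request of the kind
// the paragraph above describes. URL and ETag value are hypothetical.
use http::{header, Request};

fn conditional_request(cached_etag: &str) -> http::Result<Request<()>> {
    Request::builder()
        .method("GET")
        .uri("http://localhost:3030/v1/shops/1")
        // Ask for the compact bincode representation instead of JSON.
        .header(header::ACCEPT, "application/octet-stream")
        // Let the server reply 304 Not Modified if nothing changed.
        .header(header::IF_NONE_MATCH, cached_etag)
        .body(())
}
```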
Related projects:
-- [`BazaarRealmClient`](https://github.com/thallada/BazaarRealmClient): DLL that
+* [`BazaarRealmClient`](https://github.com/thallada/BazaarRealmClient): DLL that
handles requests and responses to this API
-- [`BazaarRealmPlugin`](https://github.com/thallada/BazaarRealmPlugin):
+* [`BazaarRealmPlugin`](https://github.com/thallada/BazaarRealmPlugin):
[SKSE](https://skse.silverlock.org/) plugin for the mod that modifies data
within the Skyrim game engine
-- [`BazaarRealmMod`](https://github.com/thallada/BazaarRealmMod): Papyrus
+* [`BazaarRealmMod`](https://github.com/thallada/BazaarRealmMod): Papyrus
scripts, ESP plugin, and all other resources for the mod
-## Docker Setup
+## Development Setup
-The easiest way to get the server up and running is using Docker.
-1. Download and install [Docker Desktop](https://www.docker.com/get-started)
-2. In PowerShell, cmd.exe, or a terminal run `docker pull postgres:alpine` then `docker pull thallada/bazaarrealm:latest`
-3. Run (replacing `<password>` with a secure generated password):
-```
-docker run -d --name postgres --network=bazaarrealm --network-alias=db --env POSTGRES_DB=bazaarrealm --env POSTGRES_USER=bazaarrealm --env POSTGRES_PASSWORD=<password> postgres:alpine
-```
-4. Run (replacing `<password>` with what you generated in previous step):
-```
-docker run -d --name bazaarrealm -p 3030:3030 --network=bazaarrealm --network-alias=api --env DATABASE_URL=postgresql://bazaarrealm:<password>@db/bazaarrealm --env HOST=http://localhost:3030 thallada/bazaarrealm:latest
-```
-5. The server should now be available at `http://localhost:3030`.
-## Docker-Compose Setup
-An alternative way to set up the API, is to use `docker-compose` which can
-orchestrate setting up the database and web server containers for you. This
-method is more useful if you would like to make changes to the API code and
-test them out.
-1. Download and install [Docker Desktop](https://www.docker.com/get-started)
-2. Git clone this repo into a folder of your choosing: `git clone https://github.com/thallada/BazaarRealmAPI.git`
-3. Create a new file `.env.docker` in the checked out `bazaar_realm_api`
-folder with the contents (replacing `<password>` with a secure generated
-password):
-```
-DATABASE_URL="postgresql://bazaarrealm:<password>@db/bazaarrealm"
-RUST_LOG="bazaar_realm_api=debug,warp=info"
-HOST="http://localhost:3030"
-PORT=3030
-POSTGRES_DB=bazaarrealm
-POSTGRES_USER=bazaarrealm
-POSTGRES_PASSWORD=<password>
-```
-3. In the checked out repo, run: `docker-compose build`
-4. Once that completes, run: `docker-compose up`
-## Manual Development Setup
-If you would prefer to run the server outside Docker on your host machine, do
-the following steps to get everything setup.
1. Install and run postgres.
2. Create postgres user and database (and add uuid extension while you're there
):
```
createuser bazaarrealm
createdb bazaarrealm
@@ -123,44 +57,27 @@ postgres=# ALTER DATABASE bazaarrealm OWNER TO bazaarrealm;
\password bazaarrealm
postgres=# CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
```
3. Save password somewhere safe and then and add a `.env` file to the project
directory with the contents:
```
DATABASE_URL=postgresql://bazaarrealm:<password>@localhost/bazaarrealm
RUST_LOG="bazaar_realm_api=debug"
HOST="http://localhost:3030"
-PORT=3030
```
-4. Install
-[`sqlx_cli`](https://github.com/launchbadge/sqlx/tree/master/sqlx-cli) with
-`cargo install --version=0.1.0-beta.1 sqlx-cli --no-default-features --features postgres`
-5. Run `sqlx migrate --source db/migrations run` which will run all the database
-migrations.
+4. Create a new file at `src/db/refinery.toml` with the contents:
+```
+[main]
+db_type = "Postgres"
+db_host = "localhost"
+db_port = "5432"
+db_user = "bazaarrealm"
+db_pass = "<database-password-here>"
+db_name = "bazaarrealm"
+```
+5. Run `cargo run -- -m` which will compile the app in debug mode and run the
+database migrations.
6. Run `./devserver.sh` to run the dev server (by default it listens at
-`127.0.0.1:3030`). Note that this runs the server in debug mode and shouldn't
-be used to serve requests from the mod. You can build the release version of
-the server with `cargo build --release`.
+`127.0.0.1:3030`).
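Taken together, the setup steps above imply a startup flow of roughly: load `.env`, run migrations when `-m` is passed, then serve on port 3030. A hypothetical outline of that flow follows; it is a sketch, not the crate's real `main.rs`, and the printed messages are placeholders.

```rust
// Hypothetical startup outline based on the README steps above.
use std::env;

fn main() -> anyhow::Result<()> {
    // Read DATABASE_URL, RUST_LOG, HOST, etc. from the `.env` file created above.
    dotenv::dotenv().ok();

    if env::args().any(|arg| arg == "-m") {
        // Placeholder for the migration runner wired up under `src/db/`.
        println!("running database migrations...");
    }

    let host = env::var("HOST").unwrap_or_else(|_| "http://localhost:3030".to_string());
    println!("starting dev server advertised at {}", host);
    // ... build the warp filters and listen on 127.0.0.1:3030 here ...
    Ok(())
}
```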
-## TLS setup
-If you would like to access the server over HTTPS, you can use [Let's
-Encrypt](https://letsencrypt.org/) to generate a SSL certificate and key and
-provide it to the API. Once you use [certbot](https://certbot.eff.org/) to
-generate the certificate and key for your domain in
-`/etc/letsencrypt/live/<domain>/`, run the api server with:
-```
-docker run -d --name bazaarrealm --network=host --env DATABASE_URL=postgresql://bazaarrealm:<password>@localhost/bazaarrealm --env PORT=443 --HOST=https://<domain> --env TLS_CERT=/etc/letsencrypt/live/<domain>/fullchain.pem --env TLS_KEY=/etc/letsencrypt/live/<domain>/privkey.pem -v /etc/letsencrypt/:/etc/letsencrypt/ thallada/bazaarrealm:latest
-```
-This command assumes that you are on Linux and you have a running postgres
-database already set up outside of docker. See Manual Development Setup for
-database setup instructions.
-The server should be accessible at your domain: `https://<domain>`.
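The removed TLS section passes the certificate and key in through the `TLS_CERT` and `TLS_KEY` variables. With warp's `tls` feature (present in the old `Cargo.toml`) those values map onto warp's TLS server builder roughly as sketched below; this is an assumption about the wiring, not the project's actual serving code.

```rust
// Rough sketch of serving warp over TLS from TLS_CERT/TLS_KEY env vars.
// Requires warp's `tls` feature; illustrative only.
use warp::Filter;

#[tokio::main]
async fn main() {
    let routes = warp::any().map(|| "hello over https");

    let cert = std::env::var("TLS_CERT").expect("TLS_CERT not set");
    let key = std::env::var("TLS_KEY").expect("TLS_KEY not set");

    warp::serve(routes)
        .tls()
        .cert_path(cert)
        .key_path(key)
        .run(([0, 0, 0, 0], 443))
        .await;
}
```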
## Testing Data
@@ -174,8 +91,8 @@ same one in all future requests.
```
http POST "http://localhost:3030/v1/owners" @test_data\owner.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
http POST "http://localhost:3030/v1/shops" @test_data\shop.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
-http PATCH "http://localhost:3030/v1/shops/1/interior_ref_list" @test_data\interior_ref_list.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
-http PATCH "http://localhost:3030/v1/shops/1/merchandise_list" @test_data\merchandise_list.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
+http POST "http://localhost:3030/v1/interior_ref_lists" @test_data\interior_ref_list.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
+http POST "http://localhost:3030/v1/merchandise_lists" @test_data\merchandise_list.json api-key:"13e2f39c-033f-442f-b42a-7ad640d2e439"
# Then, you can test the GET endpoints
http GET "http://localhost:3030/v1/owners"
http GET "http://localhost:3030/v1/shops"
@@ -183,20 +100,6 @@ http GET "http://localhost:3030/v1/interior_ref_lists"
http GET "http://localhost:3030/v1/merchandise_lists"
```
-## Database Migrations
-Migrations are handled by `sqlx`. When the server initially starts, it will
-connect to the database and check if there are any migrations in
-`db/migrations` that have not yet been applied. It will apply any at that
-time and then continue starting the server.
-A new migration can be created by running: `sqlx migrate add <name>`.
-To allow the docker container for the API to get built in CI without a
-database, the `sqlx-data.json` file needs to be re-generated every time the
-database schema changes or any query is updated. It can be generated with `cargo
-sqlx prepare`.
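The removed section says migrations live in `db/migrations` and are applied automatically at startup. With sqlx 0.4's `migrate` feature that is typically wired up with the `migrate!` macro; the following is a minimal sketch under that assumption, not the project's exact call site.

```rust
// Minimal sketch of applying embedded sqlx migrations at startup.
use sqlx::postgres::PgPoolOptions;

async fn connect_and_migrate(database_url: &str) -> anyhow::Result<sqlx::PgPool> {
    let pool = PgPoolOptions::new().connect(database_url).await?;
    // Embeds the SQL files from `db/migrations` at compile time and applies
    // any that have not been run yet.
    sqlx::migrate!("db/migrations").run(&pool).await?;
    Ok(pool)
}
```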
## Authentication
I don't want to require users of Bazaar Realm to have to remember a password,
@@ -206,6 +109,14 @@ unique UUID identifier instead. This is the api key that the
The api key is stored in the save game files for the player character and is
required to be sent with any API request that modifies data.
-Yes, it's not the most secure solution, but I'm not convinced security is a
-huge concern here. As long as users don't share their API key or the save
-game files that contain it, their data should be secure.
+Yes, it's not most secure solution, but I'm not convinced security is a huge
+concern here. As long as users don't share their API key or the save game
+files that contain it, their data should be secure.
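A sketch of the api-key scheme described above, where registration mints a random v4 UUID that later requests must present; the type and field names here are hypothetical.

```rust
// Illustrative sketch of the password-less api-key scheme.
use uuid::Uuid;

struct RegisteredOwner {
    name: String,
    api_key: Uuid,
}

fn register_owner(name: &str) -> RegisteredOwner {
    RegisteredOwner {
        name: name.to_string(),
        // Generated once at registration and then stored in the player's save.
        api_key: Uuid::new_v4(),
    }
}
```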
+## Todo
+* Add update endpoints.
+* Add endpoints for the other models.
+* Make self-contained docker container that can run the app without any setup.
+* Add rate-limiting per IP address. The `tower` crate has a service that might
+be useful for this.


@@ -1,71 +0,0 @@
CREATE TABLE IF NOT EXISTS "owners" (
"id" SERIAL PRIMARY KEY NOT NULL,
"name" VARCHAR(255) NOT NULL,
"api_key" UUID NOT NULL UNIQUE,
"ip_address" inet,
"mod_version" INTEGER NOT NULL,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX "owners_unique_name_and_api_key" ON "owners" ("name", "api_key");
CREATE TABLE "shops" (
"id" SERIAL PRIMARY KEY NOT NULL,
"name" VARCHAR(255) NOT NULL,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"description" TEXT,
"gold" INTEGER NOT NULL DEFAULT 0
CONSTRAINT "shop_gold_gt_zero" CHECK (gold >= 0),
"shop_type" VARCHAR(255) NOT NULL DEFAULT 'general_store',
"vendor_keywords" TEXT[] NOT NULL DEFAULT '{"VendorItemKey", "VendorNoSale"}',
"vendor_keywords_exclude" BOOLEAN NOT NULL DEFAULT true,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX "shops_unique_name_and_owner_id" ON "shops" ("name", "owner_id");
CREATE TABLE "interior_ref_lists" (
"id" SERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"ref_list" jsonb NOT NULL,
"shelves" jsonb NOT NULL,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE TABLE "merchandise_lists" (
"id" SERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"form_list" jsonb NOT NULL
CONSTRAINT "merchandise_quantity_gt_zero" CHECK (NOT jsonb_path_exists(form_list, '$[*].quantity ? (@ < 1)')),
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE INDEX "merchandise_lists_mod_name_and_local_form_id" ON "merchandise_lists" USING GIN (form_list jsonb_path_ops);
CREATE TABLE "vendors" (
"id" SERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"name" VARCHAR(255) NOT NULL,
"body_preset" INTEGER NOT NULL
);
CREATE UNIQUE INDEX "vendors_unique_name_and_owner_id" ON "vendors" ("name", "owner_id", "shop_id");
CREATE TABLE "transactions" (
"id" SERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"mod_name" VARCHAR(260) NOT NULL,
"local_form_id" INTEGER NOT NULL,
"name" TEXT NOT NULL,
"form_type" INTEGER NOT NULL,
"is_food" BOOLEAN NOT NULL,
"price" INTEGER NOT NULL,
"is_sell" BOOLEAN NOT NULL,
"quantity" INTEGER NOT NULL,
"amount" INTEGER NOT NULL,
"keywords" TEXT[] NOT NULL DEFAULT '{}',
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE INDEX "transactions_shop_id" ON "transactions" ("shop_id");
CREATE INDEX "transactions_owner_id" ON "transactions" ("owner_id");
CREATE INDEX "transactions_mod_name_and_local_form_id" ON "transactions" ("mod_name", "local_form_id");


@@ -1,7 +0,0 @@
DROP TABLE owners CASCADE;
DROP TABLE shops CASCADE;
DROP TABLE interior_ref_lists CASCADE;
DROP TABLE merchandise_lists CASCADE;
DROP TABLE transactions CASCADE;
DROP TABLE vendors CASCADE;
DROP TABLE _sqlx_migrations CASCADE;


@@ -1,22 +0,0 @@
version: "3.8"
services:
app:
build: .
volumes:
- cargo:/home/rust/.cargo
- target:/home/rust/src/target
env_file:
- .env.docker
ports:
- "3030:3030"
depends_on:
- db
db:
image: postgres:alpine
env_file:
- .env.docker
volumes:
cargo: {}
target: {}


@@ -0,0 +1,35 @@
CREATE TABLE IF NOT EXISTS "owners" (
"id" BIGSERIAL PRIMARY KEY NOT NULL,
"name" VARCHAR(255) NOT NULL,
"api_key" UUID NOT NULL UNIQUE,
"ip_address" inet,
"mod_version" INTEGER NOT NULL,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX "owners_unique_name_and_api_key" ON "owners" ("name", "api_key");
CREATE TABLE "shops" (
"id" BIGSERIAL PRIMARY KEY NOT NULL,
"name" VARCHAR(255) NOT NULL,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"description" TEXT,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX "shops_unique_name_and_owner_id" ON "shops" ("name", "owner_id");
CREATE TABLE "interior_ref_lists" (
"id" BIGSERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"ref_list" jsonb NOT NULL,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);
CREATE TABLE "merchandise_lists" (
"id" BIGSERIAL PRIMARY KEY NOT NULL,
"shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
"owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
"form_list" jsonb NOT NULL,
"created_at" timestamp(3) NOT NULL,
"updated_at" timestamp(3) NOT NULL
);

File diff suppressed because it is too large


@@ -24,8 +24,8 @@ where
impl<K, V> Cache<K, V> impl<K, V> Cache<K, V>
where where
K: Eq + Hash + Debug + Send, K: Eq + Hash + Debug,
V: Clone + Send, V: Clone,
{ {
pub fn new(name: &str, capacity: usize) -> Self { pub fn new(name: &str, capacity: usize) -> Self {
Cache { Cache {
@@ -48,7 +48,7 @@
} }
} }
pub async fn get<G, F>(&'static self, key: K, getter: G) -> Result<V> pub async fn get<G, F>(&self, key: K, getter: G) -> Result<V>
where where
G: Fn() -> F, G: Fn() -> F,
F: Future<Output = Result<V>>, F: Future<Output = Result<V>>,
@@ -62,13 +62,8 @@ where
self.log_with_key(&key, "get: miss"); self.log_with_key(&key, "get: miss");
let value = getter().await?; let value = getter().await?;
let mut guard = self.lru_mutex.lock().await;
let to_cache = value.clone(); guard.put(key, value.clone());
tokio::spawn(async move {
let mut guard = self.lru_mutex.lock().await;
self.log_with_key(&key, "get: update cache");
guard.put(key, to_cache);
});
Ok(value) Ok(value)
} }
@@ -90,10 +85,10 @@ where
impl<K> Cache<K, CachedResponse> impl<K> Cache<K, CachedResponse>
where where
K: Eq + Hash + Debug + Send, K: Eq + Hash + Debug,
{ {
pub async fn get_response<G, F, R>( pub async fn get_response<G, F, R>(
&'static self, &self,
key: K, key: K,
getter: G, getter: G,
) -> Result<CachedResponse, Rejection> ) -> Result<CachedResponse, Rejection>
@@ -111,27 +106,21 @@ where
self.log_with_key(&key, "get_response: miss"); self.log_with_key(&key, "get_response: miss");
let reply = getter().await.map_err(reject_anyhow); let reply = getter().await.map_err(reject_anyhow);
Ok(match reply { let cached_response = match reply {
Ok(reply) => { Ok(reply) => CachedResponse::from_reply(reply)
let cached_response = CachedResponse::from_reply(reply) .await
.await .map_err(reject_anyhow)?,
.map_err(reject_anyhow)?;
let to_cache = cached_response.clone();
tokio::spawn(async move {
let mut guard = self.lru_mutex.lock().await;
self.log_with_key(&key, "get_response: update cache");
guard.put(key, to_cache);
});
cached_response
}
Err(rejection) => { Err(rejection) => {
self.log_with_key(&key, "get_response: getter returned rejection, not caching");
let reply = unpack_problem(rejection).await?; let reply = unpack_problem(rejection).await?;
CachedResponse::from_reply(reply) CachedResponse::from_reply(reply)
.await .await
.map_err(reject_anyhow)? .map_err(reject_anyhow)?
} }
}) };
let mut guard = self.lru_mutex.lock().await;
guard.put(key, cached_response.clone());
Ok(cached_response)
} }
pub async fn delete_response(&self, key: K) -> Option<CachedResponse> { pub async fn delete_response(&self, key: K) -> Option<CachedResponse> {
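The change above moves the cache update inline: after a miss, the getter's value is put into the LRU under the mutex before returning, instead of in a spawned task. A simplified sketch of that get-or-populate pattern with an `lru::LruCache` behind a `tokio::sync::Mutex` follows; it is not the project's generic `Cache` type.

```rust
// Simplified illustration of the get-or-populate pattern in the hunk above.
use lru::LruCache;
use std::future::Future;
use tokio::sync::Mutex;

pub struct SimpleCache {
    lru_mutex: Mutex<LruCache<i32, String>>,
}

impl SimpleCache {
    pub fn new(capacity: usize) -> Self {
        SimpleCache {
            lru_mutex: Mutex::new(LruCache::new(capacity)),
        }
    }

    pub async fn get_or_insert<G, F>(&self, key: i32, getter: G) -> anyhow::Result<String>
    where
        G: Fn() -> F,
        F: Future<Output = anyhow::Result<String>>,
    {
        let mut guard = self.lru_mutex.lock().await;
        if let Some(value) = guard.get(&key) {
            return Ok(value.clone()); // cache hit
        }
        drop(guard); // release the lock while the value is computed
        let value = getter().await?; // cache miss: run the getter
        let mut guard = self.lru_mutex.lock().await;
        guard.put(key, value.clone()); // update the cache inline, as in the diff
        Ok(value)
    }
}
```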


@@ -1,5 +1,4 @@
use anyhow::Result; use anyhow::Result;
use http::header::ETAG;
use http::{HeaderMap, HeaderValue, Response, StatusCode, Version}; use http::{HeaderMap, HeaderValue, Response, StatusCode, Version};
use hyper::body::{to_bytes, Body, Bytes}; use hyper::body::{to_bytes, Body, Bytes};
use warp::Reply; use warp::Reply;
@@ -25,17 +24,6 @@ impl CachedResponse {
body: to_bytes(response.body_mut()).await?, body: to_bytes(response.body_mut()).await?,
}) })
} }
pub fn not_modified(etag: HeaderValue) -> Self {
let mut headers = HeaderMap::new();
headers.insert(ETAG, etag);
Self {
status: StatusCode::NOT_MODIFIED,
version: Version::HTTP_11,
headers,
body: Bytes::new(),
}
}
} }
impl Reply for CachedResponse { impl Reply for CachedResponse {


@@ -9,39 +9,19 @@ mod cached_response;
pub use cache::Cache; pub use cache::Cache;
pub use cached_response::CachedResponse; pub use cached_response::CachedResponse;
lazy_static! {
pub static ref CACHES: Caches = Caches::initialize();
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Caches { pub struct Caches {
pub owner_ids_by_api_key: Cache<Uuid, i32>, pub owner_ids_by_api_key: Cache<Uuid, i32>,
pub shop: Cache<i32, CachedResponse>, pub shop: Cache<i32, CachedResponse>,
pub shop_bin: Cache<i32, CachedResponse>,
pub owner: Cache<i32, CachedResponse>, pub owner: Cache<i32, CachedResponse>,
pub owner_bin: Cache<i32, CachedResponse>,
pub interior_ref_list: Cache<i32, CachedResponse>, pub interior_ref_list: Cache<i32, CachedResponse>,
pub interior_ref_list_bin: Cache<i32, CachedResponse>,
pub merchandise_list: Cache<i32, CachedResponse>, pub merchandise_list: Cache<i32, CachedResponse>,
pub merchandise_list_bin: Cache<i32, CachedResponse>,
pub transaction: Cache<i32, CachedResponse>,
pub transaction_bin: Cache<i32, CachedResponse>,
pub list_shops: Cache<ListParams, CachedResponse>, pub list_shops: Cache<ListParams, CachedResponse>,
pub list_shops_bin: Cache<ListParams, CachedResponse>,
pub list_owners: Cache<ListParams, CachedResponse>, pub list_owners: Cache<ListParams, CachedResponse>,
pub list_owners_bin: Cache<ListParams, CachedResponse>,
pub list_interior_ref_lists: Cache<ListParams, CachedResponse>, pub list_interior_ref_lists: Cache<ListParams, CachedResponse>,
pub list_interior_ref_lists_bin: Cache<ListParams, CachedResponse>,
pub list_merchandise_lists: Cache<ListParams, CachedResponse>, pub list_merchandise_lists: Cache<ListParams, CachedResponse>,
pub list_merchandise_lists_bin: Cache<ListParams, CachedResponse>,
pub list_transactions: Cache<ListParams, CachedResponse>,
pub list_transactions_bin: Cache<ListParams, CachedResponse>,
pub list_transactions_by_shop_id: Cache<(i32, ListParams), CachedResponse>,
pub list_transactions_by_shop_id_bin: Cache<(i32, ListParams), CachedResponse>,
pub interior_ref_list_by_shop_id: Cache<i32, CachedResponse>, pub interior_ref_list_by_shop_id: Cache<i32, CachedResponse>,
pub interior_ref_list_by_shop_id_bin: Cache<i32, CachedResponse>,
pub merchandise_list_by_shop_id: Cache<i32, CachedResponse>, pub merchandise_list_by_shop_id: Cache<i32, CachedResponse>,
pub merchandise_list_by_shop_id_bin: Cache<i32, CachedResponse>,
} }
impl Caches { impl Caches {
@@ -49,31 +29,15 @@ impl Caches {
Caches { Caches {
owner_ids_by_api_key: Cache::new("owner_ids_by_api_key", 100).log_keys(false), owner_ids_by_api_key: Cache::new("owner_ids_by_api_key", 100).log_keys(false),
shop: Cache::new("shop", 100), shop: Cache::new("shop", 100),
shop_bin: Cache::new("shop_bin", 100),
owner: Cache::new("owner", 100), owner: Cache::new("owner", 100),
owner_bin: Cache::new("owner_bin", 100),
interior_ref_list: Cache::new("interior_ref_list", 100), interior_ref_list: Cache::new("interior_ref_list", 100),
interior_ref_list_bin: Cache::new("interior_ref_list_bin", 100),
merchandise_list: Cache::new("merchandise_list", 100), merchandise_list: Cache::new("merchandise_list", 100),
merchandise_list_bin: Cache::new("merchandise_list_bin", 100),
transaction: Cache::new("transaction", 100),
transaction_bin: Cache::new("transaction_bin", 100),
list_shops: Cache::new("list_shops", 100), list_shops: Cache::new("list_shops", 100),
list_shops_bin: Cache::new("list_shops_bin", 100),
list_owners: Cache::new("list_owners", 100), list_owners: Cache::new("list_owners", 100),
list_owners_bin: Cache::new("list_owners_bin", 100),
list_interior_ref_lists: Cache::new("list_interior_ref_lists", 100), list_interior_ref_lists: Cache::new("list_interior_ref_lists", 100),
list_interior_ref_lists_bin: Cache::new("list_interior_ref_lists_bin", 100),
list_merchandise_lists: Cache::new("list_merchandise_lists", 100), list_merchandise_lists: Cache::new("list_merchandise_lists", 100),
list_merchandise_lists_bin: Cache::new("list_merchandise_lists_bin", 100),
list_transactions: Cache::new("list_transaction", 100),
list_transactions_bin: Cache::new("list_transaction_bin", 100),
list_transactions_by_shop_id: Cache::new("list_transaction_by_shop_id", 100),
list_transactions_by_shop_id_bin: Cache::new("list_transaction_by_shop_id_bin", 100),
interior_ref_list_by_shop_id: Cache::new("interior_ref_list_by_shop_id", 100), interior_ref_list_by_shop_id: Cache::new("interior_ref_list_by_shop_id", 100),
interior_ref_list_by_shop_id_bin: Cache::new("interior_ref_list_by_shop_id_bin", 100),
merchandise_list_by_shop_id: Cache::new("merchandise_list_by_shop_id", 100), merchandise_list_by_shop_id: Cache::new("merchandise_list_by_shop_id", 100),
merchandise_list_by_shop_id_bin: Cache::new("merchandise_list_by_shop_id_bin", 100),
} }
} }
} }


@@ -0,0 +1,76 @@
use barrel::{backend::Pg, types, Migration};
pub fn migration() -> String {
let mut m = Migration::new();
m.create_table("owners", |t| {
t.add_column("id", types::primary().indexed(true));
t.add_column("name", types::varchar(255));
t.add_column("api_key", types::uuid().indexed(true));
t.add_column("ip_address", types::custom("inet").nullable(true));
t.add_column("mod_version", types::integer());
t.add_column("created_at", types::custom("timestamp(3)"));
t.add_column("updated_at", types::custom("timestamp(3)"));
t.add_index(
"owners_unique_name_and_api_key",
types::index(vec!["name", "api_key"]).unique(true),
);
});
m.create_table("shops", |t| {
t.add_column("id", types::primary().indexed(true));
t.add_column("name", types::varchar(255));
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
t.add_column("description", types::text().nullable(true));
// removing these until I figure out the plan for buying and selling
// t.add_column("is_not_sell_buy", types::boolean().default(true));
// t.add_column("sell_buy_list_id", types::integer().default(0));
// t.add_column("vendor_id", types::integer());
// t.add_column("vendor_gold", types::integer());
t.add_column("created_at", types::custom("timestamp(3)"));
t.add_column("updated_at", types::custom("timestamp(3)"));
t.add_index(
"shops_unique_name_and_owner_id",
types::index(vec!["name", "owner_id"]).unique(true),
);
});
m.create_table("merchandise_lists", |t| {
t.add_column("id", types::primary().indexed(true));
t.add_column(
"shop_id",
types::foreign("shops", "id").indexed(true).unique(true),
);
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
t.add_column("form_list", types::custom("jsonb"));
t.add_column("created_at", types::custom("timestamp(3)"));
t.add_column("updated_at", types::custom("timestamp(3)"));
});
// m.create_table("transactions", |t| {
// t.add_column("id", types::primary().indexed(true));
// t.add_column("shop_id", types::foreign("shops", "id").indexed(true));
// t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
// t.add_column("merchandise_list_id", types::foreign("merchandise_lists", "id"));
// t.add_column("customer_name", types::varchar(255));
// t.add_column("is_customer_npc", types::boolean());
// t.add_column("is_customer_buying", types::boolean());
// t.add_column("quantity", types::integer());
// t.add_column("is_void", types::boolean());
// t.add_column("created_at", types::custom("timestamp(3)"));
// });
m.create_table("interior_ref_lists", |t| {
t.add_column("id", types::primary().indexed(true));
t.add_column(
"shop_id",
types::foreign("shops", "id").indexed(true).unique(true),
);
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
t.add_column("ref_list", types::custom("jsonb"));
t.add_column("created_at", types::custom("timestamp(3)"));
t.add_column("updated_at", types::custom("timestamp(3)"));
});
m.make::<Pg>()
}

src/db/migrations/mod.rs (new file, 3 lines)

@@ -0,0 +1,3 @@
use refinery::include_migration_mods;
include_migration_mods!("src/db/migrations");

src/db/mod.rs (new file, 16 lines)

@@ -0,0 +1,16 @@
use refinery::config::Config;
mod migrations;
pub async fn migrate() {
let mut config = Config::from_file_location("src/db/refinery.toml").unwrap();
match migrations::runner().run_async(&mut config).await {
Ok(report) => {
dbg!(report.applied_migrations());
}
Err(error) => {
dbg!(error);
}
};
}


@@ -1,293 +0,0 @@
use anyhow::Result;
use http::StatusCode;
use hyper::body::Bytes;
use mime::Mime;
use uuid::Uuid;
use warp::reply::{with_header, with_status};
use warp::{Rejection, Reply};
use crate::caches::{CachedResponse, CACHES};
use crate::models::{InteriorRefList, ListParams, PostedInteriorRefList};
use crate::problem::reject_anyhow;
use crate::Environment;
use super::{
authenticate, check_etag, AcceptHeader, Bincode, ContentType, DataReply, DeserializedBody,
ETagReply, Json, TypedCache,
};
pub async fn get(
id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(
accept,
&CACHES.interior_ref_list_bin,
&CACHES.interior_ref_list,
);
let response = cache
.get_response(id, || async {
let interior_ref_list = InteriorRefList::get(&env.db, id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&interior_ref_list)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&interior_ref_list)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn get_by_shop_id(
shop_id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(
accept,
&CACHES.interior_ref_list_by_shop_id_bin,
&CACHES.interior_ref_list_by_shop_id,
);
let response = cache
.get_response(shop_id, || async {
let interior_ref_list = InteriorRefList::get_by_shop_id(&env.db, shop_id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&interior_ref_list)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&interior_ref_list)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn list(
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<ListParams, CachedResponse>::pick_cache(
accept,
&CACHES.list_interior_ref_lists_bin,
&CACHES.list_interior_ref_lists,
);
let response = cache
.get_response(list_params.clone(), || async {
let interior_ref_lists = InteriorRefList::list(&env.db, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(ETagReply::<Bincode>::from_serializable(
&interior_ref_lists,
)?),
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&interior_ref_lists)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn create(
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: mut interior_ref_list,
content_type,
} = DeserializedBody::<PostedInteriorRefList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
interior_ref_list.owner_id = Some(owner_id);
let saved_interior_ref_list = InteriorRefList::create(interior_ref_list, &env.db)
.await
.map_err(reject_anyhow)?;
let url = saved_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&saved_interior_ref_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&saved_interior_ref_list)
.map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.list_interior_ref_lists.clear().await;
CACHES.list_interior_ref_lists_bin.clear().await;
CACHES
.interior_ref_list_by_shop_id
.delete_response(saved_interior_ref_list.shop_id)
.await;
CACHES
.interior_ref_list_by_shop_id_bin
.delete_response(saved_interior_ref_list.shop_id)
.await;
});
Ok(reply)
}
pub async fn update(
id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: interior_ref_list,
content_type,
} = DeserializedBody::<PostedInteriorRefList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let updated_interior_ref_list =
InteriorRefList::update(interior_ref_list, &env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&updated_interior_ref_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&updated_interior_ref_list)
.map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.interior_ref_list.delete_response(id).await;
CACHES.interior_ref_list_bin.delete_response(id).await;
CACHES
.interior_ref_list_by_shop_id
.delete_response(updated_interior_ref_list.shop_id)
.await;
CACHES
.interior_ref_list_by_shop_id_bin
.delete_response(updated_interior_ref_list.shop_id)
.await;
CACHES.list_interior_ref_lists.clear().await;
CACHES.list_interior_ref_lists_bin.clear().await;
});
Ok(reply)
}
pub async fn update_by_shop_id(
shop_id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: interior_ref_list,
content_type,
} = DeserializedBody::<PostedInteriorRefList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let updated_interior_ref_list =
InteriorRefList::update_by_shop_id(interior_ref_list, &env.db, owner_id, shop_id)
.await
.map_err(reject_anyhow)?;
let url = updated_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&updated_interior_ref_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&updated_interior_ref_list)
.map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES
.interior_ref_list
.delete_response(updated_interior_ref_list.id)
.await;
CACHES
.interior_ref_list_bin
.delete_response(updated_interior_ref_list.id)
.await;
CACHES
.interior_ref_list_by_shop_id
.delete_response(updated_interior_ref_list.shop_id)
.await;
CACHES
.interior_ref_list_by_shop_id_bin
.delete_response(updated_interior_ref_list.shop_id)
.await;
CACHES.list_interior_ref_lists.clear().await;
CACHES.list_interior_ref_lists_bin.clear().await;
});
Ok(reply)
}
pub async fn delete(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let interior_ref_list = InteriorRefList::get(&env.db, id)
.await
.map_err(reject_anyhow)?;
InteriorRefList::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
tokio::spawn(async move {
CACHES.interior_ref_list.delete_response(id).await;
CACHES.interior_ref_list_bin.delete_response(id).await;
CACHES
.interior_ref_list_by_shop_id
.delete_response(interior_ref_list.shop_id)
.await;
CACHES
.interior_ref_list_by_shop_id_bin
.delete_response(interior_ref_list.shop_id)
.await;
CACHES.list_interior_ref_lists.clear().await;
CACHES.list_interior_ref_lists_bin.clear().await;
});
Ok(StatusCode::NO_CONTENT)
}


@@ -1,290 +0,0 @@
use anyhow::Result;
use http::StatusCode;
use hyper::body::Bytes;
use mime::Mime;
use uuid::Uuid;
use warp::reply::{with_header, with_status};
use warp::{Rejection, Reply};
use crate::caches::{CachedResponse, CACHES};
use crate::models::{ListParams, MerchandiseList, PostedMerchandiseList};
use crate::problem::reject_anyhow;
use crate::Environment;
use super::{
authenticate, check_etag, AcceptHeader, Bincode, ContentType, DataReply, DeserializedBody,
ETagReply, Json, TypedCache,
};
pub async fn get(
id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(
accept,
&CACHES.merchandise_list_bin,
&CACHES.merchandise_list,
);
let response = cache
.get_response(id, || async {
let merchandise_list = MerchandiseList::get(&env.db, id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&merchandise_list)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&merchandise_list)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn get_by_shop_id(
shop_id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(
accept,
&CACHES.merchandise_list_by_shop_id_bin,
&CACHES.merchandise_list_by_shop_id,
);
let response = cache
.get_response(shop_id, || async {
let merchandise_list = MerchandiseList::get_by_shop_id(&env.db, shop_id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&merchandise_list)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&merchandise_list)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn list(
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<ListParams, CachedResponse>::pick_cache(
accept,
&CACHES.list_merchandise_lists_bin,
&CACHES.list_merchandise_lists,
);
let response = cache
.get_response(list_params.clone(), || async {
let merchandise_lists = MerchandiseList::list(&env.db, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&merchandise_lists)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&merchandise_lists)?)
}
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn create(
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: mut merchandise_list,
content_type,
} = DeserializedBody::<PostedMerchandiseList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
merchandise_list.owner_id = Some(owner_id);
let saved_merchandise_list = MerchandiseList::create(merchandise_list, &env.db)
.await
.map_err(reject_anyhow)?;
let url = saved_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&saved_merchandise_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&saved_merchandise_list).map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.list_merchandise_lists.clear().await;
CACHES.list_merchandise_lists_bin.clear().await;
CACHES
.merchandise_list_by_shop_id
.delete_response(saved_merchandise_list.shop_id)
.await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(saved_merchandise_list.shop_id)
.await;
});
Ok(reply)
}
pub async fn update(
id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: merchandise_list,
content_type,
} = DeserializedBody::<PostedMerchandiseList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let updated_merchandise_list = MerchandiseList::update(merchandise_list, &env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&updated_merchandise_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&updated_merchandise_list)
.map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.merchandise_list.delete_response(id).await;
CACHES.merchandise_list_bin.delete_response(id).await;
CACHES
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES.list_merchandise_lists.clear().await;
CACHES.list_merchandise_lists_bin.clear().await;
});
Ok(reply)
}
pub async fn update_by_shop_id(
shop_id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: merchandise_list,
content_type,
} = DeserializedBody::<PostedMerchandiseList>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let updated_merchandise_list =
MerchandiseList::update_by_shop_id(merchandise_list, &env.db, owner_id, shop_id)
.await
.map_err(reject_anyhow)?;
let url = updated_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&updated_merchandise_list)
.map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&updated_merchandise_list)
.map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES
.merchandise_list
.delete_response(updated_merchandise_list.id)
.await;
CACHES
.merchandise_list_bin
.delete_response(updated_merchandise_list.id)
.await;
CACHES
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES.list_merchandise_lists.clear().await;
CACHES.list_merchandise_lists_bin.clear().await;
});
Ok(reply)
}
pub async fn delete(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let merchandise_list = MerchandiseList::get(&env.db, id)
.await
.map_err(reject_anyhow)?;
MerchandiseList::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
tokio::spawn(async move {
CACHES.merchandise_list.delete_response(id).await;
CACHES.merchandise_list_bin.delete_response(id).await;
CACHES
.merchandise_list_by_shop_id
.delete_response(merchandise_list.shop_id)
.await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(merchandise_list.shop_id)
.await;
CACHES.list_merchandise_lists.clear().await;
CACHES.list_merchandise_lists_bin.clear().await;
});
Ok(StatusCode::NO_CONTENT)
}


@@ -1,37 +1,24 @@
use std::fmt::Debug; use anyhow::{anyhow, Result};
use std::hash::Hash;
use std::marker::PhantomData;
use std::str::FromStr;
use anyhow::{anyhow, Error, Result};
use http::header::{HeaderValue, CONTENT_TYPE, ETAG, SERVER};
use http::StatusCode; use http::StatusCode;
use http_api_problem::HttpApiProblem; use ipnetwork::IpNetwork;
use hyper::body::Bytes; use sqlx::types::Json;
use mime::{FromStrError, Mime}; use std::net::SocketAddr;
use seahash::hash; use tracing::instrument;
use serde::{de::DeserializeOwned, Serialize};
use tracing::{debug, error, instrument, warn};
use uuid::Uuid; use uuid::Uuid;
use warp::reply::Response; use warp::reply::{json, with_header, with_status};
use warp::Reply; use warp::{Rejection, Reply};
pub mod interior_ref_list; use super::models::{
pub mod merchandise_list; InteriorRefList, ListParams, MerchandiseList, MerchandiseParams, Model, Owner, Shop,
pub mod owner; UpdateableModel,
pub mod shop; };
pub mod transaction; use super::problem::{reject_anyhow, unauthorized_no_api_key, unauthorized_no_owner};
use super::caches::{Cache, CachedResponse, CACHES};
use super::problem::{unauthorized_no_api_key, unauthorized_no_owner};
use super::Environment; use super::Environment;
pub static SERVER_STRING: &str = "BazaarRealmAPI/0.1.0";
#[instrument(level = "debug", skip(env, api_key))] #[instrument(level = "debug", skip(env, api_key))]
pub async fn authenticate(env: &Environment, api_key: Option<Uuid>) -> Result<i32> { pub async fn authenticate(env: &Environment, api_key: Option<Uuid>) -> Result<i32> {
if let Some(api_key) = api_key { if let Some(api_key) = api_key {
CACHES env.caches
.owner_ids_by_api_key .owner_ids_by_api_key
.get(api_key, || async { .get(api_key, || async {
Ok( Ok(
@@ -53,210 +40,611 @@ pub async fn authenticate(env: &Environment, api_key: Option<Uuid>) -> Result<i32>
} }
} }
// Similar to `warp::reply::Json`, but stores hash of body content for the ETag header created in `into_response`. pub async fn get_shop(id: i32, env: Environment) -> Result<impl Reply, Rejection> {
// Also, it does not store a serialize `Result`. Instead it returns the error to the caller immediately in `from_serializable`. env.caches
// It's purpose is to avoid serializing the body content twice and to encapsulate ETag logic in one place. .shop
pub struct ETagReply<T> { .get_response(id, || async {
body: Vec<u8>, let shop = Shop::get(&env.db, id).await?;
etag: String, let reply = json(&shop);
content_type: PhantomData<T>, let reply = with_status(reply, StatusCode::OK);
} Ok(reply)
pub trait DataReply: Reply + Sized {
fn from_serializable<T: Serialize>(val: &T) -> Result<Self>;
}
pub struct Json {}
pub struct Bincode {}
#[derive(Debug, PartialEq, Eq)]
pub enum ContentType {
Json,
Bincode,
}
impl Reply for ETagReply<Json> {
fn into_response(self) -> Response {
let mut res = Response::new(self.body.into());
res.headers_mut()
.insert(CONTENT_TYPE, HeaderValue::from_static("application/json"));
res.headers_mut()
.insert(SERVER, HeaderValue::from_static(SERVER_STRING));
if let Ok(val) = HeaderValue::from_str(&self.etag) {
res.headers_mut().insert(ETAG, val);
} else {
// This should never happen in practice since etag values should only be hex-encoded strings
warn!("omitting etag header with invalid ASCII characters")
}
res
}
}
impl DataReply for ETagReply<Json> {
fn from_serializable<T: Serialize>(val: &T) -> Result<Self> {
let bytes = serde_json::to_vec(val).map_err(|err| {
error!("Failed to serialize database value to JSON: {}", err);
anyhow!(HttpApiProblem::with_title_and_type_from_status(
StatusCode::INTERNAL_SERVER_ERROR
)
.set_detail(format!(
"Failed to serialize database value to JSON: {}",
err
)))
})?;
let etag = format!("{:x}", hash(&bytes));
Ok(Self {
body: bytes,
etag,
content_type: PhantomData,
}) })
} .await
} }
impl Reply for ETagReply<Bincode> { pub async fn list_shops(
fn into_response(self) -> Response { list_params: ListParams,
let mut res = Response::new(self.body.into()); env: Environment,
res.headers_mut().insert( ) -> Result<impl Reply, Rejection> {
CONTENT_TYPE, env.caches
HeaderValue::from_static("application/octet-stream"), .list_shops
); .get_response(list_params.clone(), || async {
res.headers_mut() let shops = Shop::list(&env.db, &list_params).await?;
.insert(SERVER, HeaderValue::from_static(SERVER_STRING)); let reply = json(&shops);
if let Ok(val) = HeaderValue::from_str(&self.etag) { let reply = with_status(reply, StatusCode::OK);
res.headers_mut().insert(ETAG, val); Ok(reply)
} else {
// This should never happen in practice since etag values should only be hex-encoded strings
warn!("omitting etag header with invalid ASCII characters")
}
res
}
}
impl DataReply for ETagReply<Bincode> {
fn from_serializable<T: Serialize>(val: &T) -> Result<Self> {
let bytes = bincode::serialize(val).map_err(|err| {
error!("Failed to serialize database value to bincode: {}", err);
anyhow!(HttpApiProblem::with_title_and_type_from_status(
StatusCode::INTERNAL_SERVER_ERROR
)
.set_detail(format!(
"Failed to serialize database value to bincode: {}",
err
)))
})?;
let etag = format!("{:x}", hash(&bytes));
Ok(Self {
body: bytes,
etag,
content_type: PhantomData,
}) })
} .await
} }
pub fn check_etag(etag: Option<String>, response: CachedResponse) -> CachedResponse { pub async fn create_shop(
if let Some(request_etag) = etag { shop: Shop,
if let Some(response_etag) = response.headers.get("etag") { api_key: Option<Uuid>,
if request_etag == *response_etag { env: Environment,
return CachedResponse::not_modified(response_etag.clone()); ) -> Result<impl Reply, Rejection> {
} let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let shop_with_owner_id = Shop {
owner_id: Some(owner_id),
..shop
};
let saved_shop = shop_with_owner_id
.create(&env.db)
.await
.map_err(reject_anyhow)?;
// also save empty interior_ref_list and merchandise_list rows
if let Some(shop_id) = saved_shop.id {
let interior_ref_list = InteriorRefList {
id: None,
shop_id,
owner_id: Some(owner_id),
ref_list: Json::default(),
created_at: None,
updated_at: None,
};
interior_ref_list
.create(&env.db)
.await
.map_err(reject_anyhow)?;
let merchandise_list = MerchandiseList {
id: None,
shop_id,
owner_id: Some(owner_id),
form_list: Json::default(),
created_at: None,
updated_at: None,
};
merchandise_list
.create(&env.db)
.await
.map_err(reject_anyhow)?;
}
let url = saved_shop.url(&env.api_url).map_err(reject_anyhow)?;
let reply = json(&saved_shop);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.list_shops.clear().await;
Ok(reply)
}
pub async fn update_shop(
id: i32,
shop: Shop,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let shop_with_id_and_owner_id = if shop.owner_id.is_some() {
// allows an owner to transfer ownership of shop to another owner
Shop {
id: Some(id),
..shop
} }
} } else {
response Shop {
id: Some(id),
owner_id: Some(owner_id),
..shop
}
};
let updated_shop = shop_with_id_and_owner_id
.update(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_shop.url(&env.api_url).map_err(reject_anyhow)?;
let reply = json(&updated_shop);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.shop.delete_response(id).await;
env.caches.list_shops.clear().await;
Ok(reply)
} }
#[derive(Debug, PartialEq)] pub async fn delete_shop(
pub struct AcceptHeader { id: i32,
mimes: Vec<Mime>, api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
Shop::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
env.caches.shop.delete_response(id).await;
env.caches.list_shops.clear().await;
env.caches
.interior_ref_list_by_shop_id
.delete_response(id)
.await;
env.caches
.merchandise_list_by_shop_id
.delete_response(id)
.await;
Ok(StatusCode::NO_CONTENT)
}
impl FromStr for AcceptHeader {
    type Err = Error;

    fn from_str(s: &str) -> Result<Self> {
        Ok(Self {
            mimes: s
                .split(',')
                .map(|part| part.trim().parse::<Mime>())
                .collect::<std::result::Result<Vec<Mime>, FromStrError>>()?,
        })
    }
}

pub async fn get_owner(id: i32, env: Environment) -> Result<impl Reply, Rejection> {
    env.caches
        .owner
        .get_response(id, || async {
            let owner = Owner::get(&env.db, id).await?;
            let reply = json(&owner);
            let reply = with_status(reply, StatusCode::OK);
            Ok(reply)
        })
.await
}
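// Hedged sketch (not part of the diff) of the get-or-compute pattern that the
// `env.caches.*.get_response(key, || async { ... })` calls above rely on. The
// real `Cache` type lives in the `caches` module and is not shown in this hunk,
// so this stand-in is illustrative only (it also holds the lock across the
// compute future, which the real implementation may avoid).
use lru::LruCache;
use tokio::sync::Mutex;

pub struct SketchCache<K: std::hash::Hash + Eq, V: Clone> {
    inner: Mutex<LruCache<K, V>>,
}

impl<K: std::hash::Hash + Eq, V: Clone> SketchCache<K, V> {
    pub async fn get_or_compute<F, Fut>(&self, key: K, compute: F) -> anyhow::Result<V>
    where
        F: FnOnce() -> Fut,
        Fut: std::future::Future<Output = anyhow::Result<V>>,
    {
        let mut guard = self.inner.lock().await;
        if let Some(hit) = guard.get(&key) {
            return Ok(hit.clone());
        }
        let value = compute().await?;
        guard.put(key, value.clone());
        Ok(value)
    }
}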
pub async fn list_owners(
list_params: ListParams,
env: Environment,
) -> Result<impl Reply, Rejection> {
env.caches
.list_owners
.get_response(list_params.clone(), || async {
let owners = Owner::list(&env.db, &list_params).await?;
let reply = json(&owners);
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await
}
pub async fn create_owner(
owner: Owner,
remote_addr: Option<SocketAddr>,
api_key: Option<Uuid>,
real_ip: Option<IpNetwork>,
env: Environment,
) -> Result<impl Reply, Rejection> {
if let Some(api_key) = api_key {
let owner_with_ip_and_key = match remote_addr {
Some(addr) => Owner {
api_key: Some(api_key),
ip_address: Some(IpNetwork::from(addr.ip())),
..owner
},
None => Owner {
api_key: Some(api_key),
ip_address: real_ip,
..owner
},
};
let saved_owner = owner_with_ip_and_key
.create(&env.db)
.await
.map_err(reject_anyhow)?;
let url = saved_owner.url(&env.api_url).map_err(reject_anyhow)?;
let reply = json(&saved_owner);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.list_owners.clear().await;
Ok(reply)
} else {
Err(reject_anyhow(unauthorized_no_api_key()))
    }
}
impl AcceptHeader {
    pub fn accepts_bincode(&self) -> bool {
        self.mimes.contains(&mime::APPLICATION_OCTET_STREAM)
    }
}

pub async fn update_owner(
    id: i32,
    owner: Owner,
    api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let owner_with_id = Owner {
id: Some(id),
..owner
};
let updated_owner = owner_with_id
.update(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_owner.url(&env.api_url).map_err(reject_anyhow)?;
let reply = json(&updated_owner);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.owner.delete_response(id).await;
env.caches.list_owners.clear().await;
Ok(reply)
}
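// Hedged aside (not part of the diff): a small test sketch of the
// `AcceptHeader` parsing defined above; any `application/octet-stream` entry
// in the Accept header switches the handlers to the bincode representation.
#[cfg(test)]
mod accept_header_tests {
    use std::str::FromStr;

    use super::AcceptHeader;

    #[test]
    fn octet_stream_selects_bincode() {
        let header = AcceptHeader::from_str("application/json, application/octet-stream").unwrap();
        assert!(header.accepts_bincode());
    }
}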
pub struct DeserializedBody<T> {
    body: T,
    content_type: ContentType,
}

pub async fn delete_owner(
    id: i32,
    api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
Owner::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
env.caches.owner.delete_response(id).await;
env.caches
.owner_ids_by_api_key
.delete(api_key.expect("api-key has been validated during authenticate"))
.await;
env.caches.list_owners.clear().await;
Ok(StatusCode::NO_CONTENT)
}
impl<T: DeserializeOwned> DeserializedBody<T> {
    pub fn from_bytes(bytes: Bytes, content_type: Option<Mime>) -> Result<Self> {
        match content_type {
            Some(content_type) if content_type == mime::APPLICATION_OCTET_STREAM => {
                debug!(
                    content_type = ?ContentType::Bincode,
                    "deserializing body as bincode"
                );
                Ok(Self {
                    content_type: ContentType::Bincode,
                    body: bincode::deserialize(&bytes)?,
                })
            }
            _ => {
                debug!(
                    content_type = ?ContentType::Json,
                    "deserializing body as json"
                );
                Ok(Self {
                    content_type: ContentType::Json,
                    body: serde_json::from_slice(&bytes)?,
                })
            }
        }
    }
}

pub async fn get_interior_ref_list(id: i32, env: Environment) -> Result<impl Reply, Rejection> {
    env.caches
        .interior_ref_list
        .get_response(id, || async {
            let interior_ref_list = InteriorRefList::get(&env.db, id).await?;
            let reply = json(&interior_ref_list);
            let reply = with_status(reply, StatusCode::OK);
            Ok(reply)
        })
        .await
}

pub async fn list_interior_ref_lists(
    list_params: ListParams,
    env: Environment,
) -> Result<impl Reply, Rejection> {
    env.caches
        .list_interior_ref_lists
        .get_response(list_params.clone(), || async {
            let interior_ref_lists = InteriorRefList::list(&env.db, &list_params).await?;
            let reply = json(&interior_ref_lists);
            let reply = with_status(reply, StatusCode::OK);
            Ok(reply)
})
.await
}
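// Hedged aside (not part of the diff): `ListParams` above is produced by
// `warp::query::<ListParams>()` in main.rs, so list endpoints are driven by the
// query string, e.g. GET /v1/interior_ref_lists?limit=25&offset=50&order_by=updated_at.
// The exact field names below are an assumption based on `get_order_by()`,
// `limit.unwrap_or(10)`, and the OFFSET bind seen in the models:
#[derive(serde::Deserialize, Clone)]
struct ListParamsSketch {
    limit: Option<i64>,
    offset: Option<i64>,
    order_by: Option<String>,
}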
pub async fn create_interior_ref_list(
interior_ref_list: InteriorRefList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let ref_list_with_owner_id = InteriorRefList {
owner_id: Some(owner_id),
..interior_ref_list
};
let saved_interior_ref_list = ref_list_with_owner_id
.create(&env.db)
.await
.map_err(reject_anyhow)?;
let url = saved_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&saved_interior_ref_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.list_interior_ref_lists.clear().await;
env.caches
.interior_ref_list_by_shop_id
.delete_response(saved_interior_ref_list.shop_id)
.await;
Ok(reply)
}
pub async fn update_interior_ref_list(
id: i32,
interior_ref_list: InteriorRefList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let interior_ref_list_with_id_and_owner_id = if interior_ref_list.owner_id.is_some() {
InteriorRefList {
id: Some(id),
..interior_ref_list
        }
    } else {
        InteriorRefList {
            id: Some(id),
            owner_id: Some(owner_id),
            ..interior_ref_list
        }
    };

pub struct TypedCache<'a, K, V>
where
    K: Eq + Hash + Debug,
    V: Clone,
{
    cache: &'a Cache<K, V>,
    content_type: ContentType,
}

impl<'a, K, V> TypedCache<'a, K, V>
where
    K: Eq + Hash + Debug,
    V: Clone,
{
    pub fn pick_cache(
        accept: Option<AcceptHeader>,
        bincode_cache: &'a Cache<K, V>,
        json_cache: &'a Cache<K, V>,
    ) -> Self {
        match accept {
            Some(accept) if accept.accepts_bincode() => {
                debug!(
                    content_type = ?ContentType::Bincode,
                    "serializing body as bincode"
                );
                Self {
                    content_type: ContentType::Bincode,
                    cache: bincode_cache,
                }
            }
            _ => {
                debug!(content_type = ?ContentType::Json, "serializing body as json");
                Self {
                    content_type: ContentType::Json,
                    cache: json_cache,
                }
            }
        }
    }
}
let updated_interior_ref_list = interior_ref_list_with_id_and_owner_id
.update(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&updated_interior_ref_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.interior_ref_list.delete_response(id).await;
env.caches
.interior_ref_list_by_shop_id
.delete_response(updated_interior_ref_list.shop_id)
.await;
env.caches.list_interior_ref_lists.clear().await;
Ok(reply)
}
pub async fn update_interior_ref_list_by_shop_id(
shop_id: i32,
interior_ref_list: InteriorRefList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let interior_ref_list_with_owner_id = InteriorRefList {
owner_id: Some(owner_id),
..interior_ref_list
};
let updated_interior_ref_list = interior_ref_list_with_owner_id
.update_by_shop_id(&env.db, owner_id, shop_id)
.await
.map_err(reject_anyhow)?;
let url = updated_interior_ref_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&updated_interior_ref_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches
.interior_ref_list
.delete_response(
updated_interior_ref_list
.id
.expect("saved interior_ref_list has no id"),
)
.await;
env.caches
.interior_ref_list_by_shop_id
.delete_response(updated_interior_ref_list.shop_id)
.await;
env.caches.list_interior_ref_lists.clear().await;
Ok(reply)
}
pub async fn delete_interior_ref_list(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let interior_ref_list = InteriorRefList::get(&env.db, id)
.await
.map_err(reject_anyhow)?;
InteriorRefList::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
env.caches.interior_ref_list.delete_response(id).await;
env.caches.list_interior_ref_lists.clear().await;
env.caches
.interior_ref_list_by_shop_id
.delete_response(interior_ref_list.shop_id)
.await;
Ok(StatusCode::NO_CONTENT)
}
pub async fn get_interior_ref_list_by_shop_id(
shop_id: i32,
env: Environment,
) -> Result<impl Reply, Rejection> {
env.caches
.interior_ref_list_by_shop_id
.get_response(shop_id, || async {
let interior_ref_list = InteriorRefList::get_by_shop_id(&env.db, shop_id).await?;
let reply = json(&interior_ref_list);
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await
}
// TODO: probably need a way to get by shop id instead
pub async fn get_merchandise_list(id: i32, env: Environment) -> Result<impl Reply, Rejection> {
env.caches
.merchandise_list
.get_response(id, || async {
let merchandise_list = MerchandiseList::get(&env.db, id).await?;
let reply = json(&merchandise_list);
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await
}
pub async fn list_merchandise_lists(
list_params: ListParams,
env: Environment,
) -> Result<impl Reply, Rejection> {
env.caches
.list_merchandise_lists
.get_response(list_params.clone(), || async {
let merchandise_lists = MerchandiseList::list(&env.db, &list_params).await?;
let reply = json(&merchandise_lists);
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await
}
pub async fn create_merchandise_list(
merchandise_list: MerchandiseList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let ref_list_with_owner_id = MerchandiseList {
owner_id: Some(owner_id),
..merchandise_list
};
let saved_merchandise_list = ref_list_with_owner_id
.create(&env.db)
.await
.map_err(reject_anyhow)?;
let url = saved_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&saved_merchandise_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.list_merchandise_lists.clear().await;
env.caches
.merchandise_list_by_shop_id
.delete_response(saved_merchandise_list.shop_id)
.await;
Ok(reply)
}
pub async fn update_merchandise_list(
id: i32,
merchandise_list: MerchandiseList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let merchandise_list_with_id_and_owner_id = if merchandise_list.owner_id.is_some() {
MerchandiseList {
id: Some(id),
..merchandise_list
}
} else {
MerchandiseList {
id: Some(id),
owner_id: Some(owner_id),
..merchandise_list
}
};
let updated_merchandise_list = merchandise_list_with_id_and_owner_id
.update(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&updated_merchandise_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches.merchandise_list.delete_response(id).await;
env.caches
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
env.caches.list_merchandise_lists.clear().await;
Ok(reply)
}
pub async fn update_merchandise_list_by_shop_id(
shop_id: i32,
merchandise_list: MerchandiseList,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let merchandise_list_with_owner_id = MerchandiseList {
owner_id: Some(owner_id),
..merchandise_list
};
let updated_merchandise_list = merchandise_list_with_owner_id
.update_by_shop_id(&env.db, owner_id, shop_id)
.await
.map_err(reject_anyhow)?;
let url = updated_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&updated_merchandise_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches
.merchandise_list
.delete_response(
updated_merchandise_list
.id
.expect("saved merchandise_list has no id"),
)
.await;
env.caches
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
env.caches.list_merchandise_lists.clear().await;
Ok(reply)
}
pub async fn delete_merchandise_list(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let merchandise_list = MerchandiseList::get(&env.db, id)
.await
.map_err(reject_anyhow)?;
MerchandiseList::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
env.caches.merchandise_list.delete_response(id).await;
env.caches
.merchandise_list_by_shop_id
.delete_response(merchandise_list.shop_id)
.await;
env.caches.list_merchandise_lists.clear().await;
Ok(StatusCode::NO_CONTENT)
}
pub async fn get_merchandise_list_by_shop_id(
shop_id: i32,
env: Environment,
) -> Result<impl Reply, Rejection> {
env.caches
.merchandise_list_by_shop_id
.get_response(shop_id, || async {
let merchandise_list = MerchandiseList::get_by_shop_id(&env.db, shop_id).await?;
let reply = json(&merchandise_list);
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await
}
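// Hedged aside (not part of the diff): `buy_merchandise` below receives
// `MerchandiseParams` from the query string via `warp::query::<MerchandiseParams>()`
// on POST /v1/shops/{id}/merchandise_list. A stand-in struct with the fields the
// handler actually reads (names match the calls below; types are an assumption):
#[derive(serde::Deserialize)]
struct MerchandiseParamsSketch {
    mod_name: String,
    local_form_id: i32,
    quantity_delta: i32,
}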
pub async fn buy_merchandise(
shop_id: i32,
merchandise_params: MerchandiseParams,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
// TODO: create transaction
let updated_merchandise_list = MerchandiseList::update_merchandise_quantity(
&env.db,
shop_id,
&(merchandise_params.mod_name),
merchandise_params.local_form_id,
merchandise_params.quantity_delta,
)
.await
.map_err(reject_anyhow)?;
let url = updated_merchandise_list
.url(&env.api_url)
.map_err(reject_anyhow)?;
let reply = json(&updated_merchandise_list);
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
env.caches
.merchandise_list
.delete_response(
updated_merchandise_list
.id
.expect("saved merchandise_list has no id"),
)
.await;
env.caches
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
env.caches.list_merchandise_lists.clear().await;
Ok(reply)
}

View File

@ -1,171 +0,0 @@
use anyhow::Result;
use http::StatusCode;
use hyper::body::Bytes;
use ipnetwork::IpNetwork;
use mime::Mime;
use std::net::SocketAddr;
use uuid::Uuid;
use warp::reply::{with_header, with_status};
use warp::{Rejection, Reply};
use crate::caches::{CachedResponse, CACHES};
use crate::models::{FullPostedOwner, ListParams, Owner, PostedOwner};
use crate::problem::{reject_anyhow, unauthorized_no_api_key};
use crate::Environment;
use super::{
authenticate, check_etag, AcceptHeader, Bincode, ContentType, DataReply, DeserializedBody,
ETagReply, Json, TypedCache,
};
pub async fn get(
id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(accept, &CACHES.owner_bin, &CACHES.owner);
let response = cache
.get_response(id, || async {
let owner = Owner::get(&env.db, id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(ETagReply::<Bincode>::from_serializable(&owner)?),
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&owner)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
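// Hedged sketch (not part of the diff): a self-contained demonstration of the
// If-None-Match contract that `check_etag` enforces above, using warp's test
// helpers and a toy route rather than the real filters from main.rs.
#[cfg(test)]
#[tokio::test]
async fn if_none_match_returns_304_sketch() {
    use warp::Filter;

    let etag = "\"abc123\"";
    let route = warp::path("owners")
        .and(warp::header::optional::<String>("if-none-match"))
        .map(move |if_none_match: Option<String>| {
            if if_none_match.as_deref() == Some(etag) {
                warp::reply::with_status(warp::reply(), http::StatusCode::NOT_MODIFIED)
            } else {
                warp::reply::with_status(warp::reply(), http::StatusCode::OK)
            }
        });
    let res = warp::test::request()
        .path("/owners")
        .header("if-none-match", etag)
        .reply(&route)
        .await;
    assert_eq!(res.status(), http::StatusCode::NOT_MODIFIED);
}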
pub async fn list(
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<ListParams, CachedResponse>::pick_cache(
accept,
&CACHES.list_owners_bin,
&CACHES.list_owners,
);
let response = cache
.get_response(list_params.clone(), || async {
let owners = Owner::list(&env.db, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(ETagReply::<Bincode>::from_serializable(&owners)?),
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&owners)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn create(
bytes: Bytes,
remote_addr: Option<SocketAddr>,
api_key: Option<Uuid>,
real_ip: Option<IpNetwork>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
if let Some(api_key) = api_key {
let DeserializedBody {
body: owner,
content_type,
} = DeserializedBody::<PostedOwner>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner = FullPostedOwner {
name: owner.name,
mod_version: owner.mod_version,
api_key,
ip_address: match remote_addr {
Some(addr) => Some(IpNetwork::from(addr.ip())),
None => real_ip,
},
};
let saved_owner = Owner::create(owner, &env.db).await.map_err(reject_anyhow)?;
let url = saved_owner.url(&env.api_url).map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&saved_owner).map_err(reject_anyhow)?,
),
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&saved_owner).map_err(reject_anyhow)?)
}
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.list_owners.clear().await;
CACHES.list_owners_bin.clear().await;
});
Ok(reply)
} else {
Err(reject_anyhow(unauthorized_no_api_key()))
}
}
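// Hedged aside (not part of the diff): both arms above go through the
// `DataReply` trait from handlers/mod.rs. Its full definition is not in this
// hunk; based on the `ETagReply<Bincode>` impl shown earlier, it is essentially
// the following, reproduced here as a stand-alone sketch:
pub trait DataReplySketch: Sized {
    fn from_serializable<T: serde::Serialize>(val: &T) -> anyhow::Result<Self>;
}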
pub async fn update(
id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: owner,
content_type,
} = DeserializedBody::<PostedOwner>::from_bytes(bytes, content_type).map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
let updated_owner = Owner::update(owner, &env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_owner.url(&env.api_url).map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&updated_owner).map_err(reject_anyhow)?,
),
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&updated_owner).map_err(reject_anyhow)?)
}
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.owner.delete_response(id).await;
CACHES.owner_bin.delete_response(id).await;
CACHES.list_owners.clear().await;
CACHES.list_owners_bin.clear().await;
});
Ok(reply)
}
pub async fn delete(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
Owner::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
tokio::spawn(async move {
let api_key = api_key.expect("api-key has been validated during authenticate");
CACHES.owner.delete_response(id).await;
CACHES.owner_bin.delete_response(id).await;
CACHES.owner_ids_by_api_key.delete(api_key).await;
CACHES.list_owners.clear().await;
CACHES.list_owners_bin.clear().await;
});
Ok(StatusCode::NO_CONTENT)
}

View File

@ -1,202 +0,0 @@
use anyhow::{anyhow, Result};
use http::StatusCode;
use hyper::body::Bytes;
use mime::Mime;
use uuid::Uuid;
use warp::reply::{with_header, with_status};
use warp::{Rejection, Reply};
use crate::caches::{CachedResponse, CACHES};
use crate::models::{
InteriorRefList, ListParams, MerchandiseList, PostedInteriorRefList, PostedMerchandiseList,
PostedShop, Shop,
};
use crate::problem::reject_anyhow;
use crate::Environment;
use super::{
authenticate, check_etag, AcceptHeader, Bincode, ContentType, DataReply, DeserializedBody,
ETagReply, Json, TypedCache,
};
pub async fn get(
id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(accept, &CACHES.shop_bin, &CACHES.shop);
let response = cache
.get_response(id, || async {
let shop = Shop::get(&env.db, id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(ETagReply::<Bincode>::from_serializable(&shop)?),
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&shop)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn list(
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<ListParams, CachedResponse>::pick_cache(
accept,
&CACHES.list_shops_bin,
&CACHES.list_shops,
);
let response = cache
.get_response(list_params.clone(), || async {
let shops = Shop::list(&env.db, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(ETagReply::<Bincode>::from_serializable(&shops)?),
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&shops)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn create(
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: mut shop,
content_type,
} = DeserializedBody::<PostedShop>::from_bytes(bytes, content_type).map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
shop.owner_id = Some(owner_id);
let mut tx = env
.db
.begin()
.await
.map_err(|error| reject_anyhow(anyhow!(error)))?;
let saved_shop = Shop::create(shop, &mut tx).await.map_err(reject_anyhow)?;
// also save empty interior_ref_list and merchandise_list rows
let interior_ref_list = PostedInteriorRefList {
shop_id: saved_shop.id,
owner_id: Some(owner_id),
ref_list: sqlx::types::Json::default(),
shelves: sqlx::types::Json::default(),
};
InteriorRefList::create(interior_ref_list, &mut tx)
.await
.map_err(reject_anyhow)?;
let merchandise_list = PostedMerchandiseList {
shop_id: saved_shop.id,
owner_id: Some(owner_id),
form_list: sqlx::types::Json::default(),
};
MerchandiseList::create(merchandise_list, &mut tx)
.await
.map_err(reject_anyhow)?;
tx.commit()
.await
.map_err(|error| reject_anyhow(anyhow!(error)))?;
let url = saved_shop.url(&env.api_url).map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&saved_shop).map_err(reject_anyhow)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&saved_shop).map_err(reject_anyhow)?)
}
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.list_shops.clear().await;
CACHES.list_shops_bin.clear().await;
});
Ok(reply)
}
pub async fn update(
id: i32,
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: mut shop,
content_type,
} = DeserializedBody::<PostedShop>::from_bytes(bytes, content_type).map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
shop.owner_id = match shop.owner_id {
// allows an owner to transfer ownership of shop to another owner
Some(posted_owner_id) => Some(posted_owner_id),
None => Some(owner_id),
};
let updated_shop = Shop::update(shop, &env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
let url = updated_shop.url(&env.api_url).map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&updated_shop).map_err(reject_anyhow)?)
}
ContentType::Json => {
Box::new(ETagReply::<Json>::from_serializable(&updated_shop).map_err(reject_anyhow)?)
}
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
CACHES.shop.delete_response(id).await;
CACHES.shop_bin.delete_response(id).await;
CACHES.list_shops.clear().await;
CACHES.list_shops_bin.clear().await;
});
Ok(reply)
}
pub async fn delete(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
Shop::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
tokio::spawn(async move {
CACHES.shop.delete_response(id).await;
CACHES.shop_bin.delete_response(id).await;
CACHES.list_shops.clear().await;
CACHES.list_shops_bin.clear().await;
CACHES
.interior_ref_list_by_shop_id
.delete_response(id)
.await;
CACHES
.interior_ref_list_by_shop_id_bin
.delete_response(id)
.await;
CACHES.merchandise_list_by_shop_id.delete_response(id).await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(id)
.await;
});
Ok(StatusCode::NO_CONTENT)
}
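// Hedged aside (not part of the diff): the `CACHES` referenced throughout this
// file is a process-wide lazy_static from the caches module (hence the
// `#[macro_use] extern crate lazy_static` in main.rs). A minimal, self-contained
// sketch of the pattern using a hypothetical `SketchCaches` type:
struct SketchCaches;

impl SketchCaches {
    fn initialize() -> Self {
        SketchCaches
    }
}

lazy_static::lazy_static! {
    static ref SKETCH_CACHES: SketchCaches = SketchCaches::initialize();
}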

View File

@ -1,238 +0,0 @@
use anyhow::{anyhow, Result};
use http::StatusCode;
use http_api_problem::HttpApiProblem;
use hyper::body::Bytes;
use mime::Mime;
use uuid::Uuid;
use warp::reply::{with_header, with_status};
use warp::{reject, Rejection, Reply};
use crate::caches::{CachedResponse, CACHES};
use crate::models::{ListParams, MerchandiseList, PostedTransaction, Shop, Transaction};
use crate::problem::reject_anyhow;
use crate::Environment;
use super::{
authenticate, check_etag, AcceptHeader, Bincode, ContentType, DataReply, DeserializedBody,
ETagReply, Json, TypedCache,
};
pub async fn get(
id: i32,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<i32, CachedResponse>::pick_cache(
accept,
&CACHES.transaction_bin,
&CACHES.transaction,
);
let response = cache
.get_response(id, || async {
let transaction = Transaction::get(&env.db, id).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&transaction)?)
}
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&transaction)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn list(
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<ListParams, CachedResponse>::pick_cache(
accept,
&CACHES.list_transactions_bin,
&CACHES.list_transactions,
);
let response = cache
.get_response(list_params.clone(), || async {
let transactions = Transaction::list(&env.db, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&transactions)?)
}
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&transactions)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn list_by_shop_id(
shop_id: i32,
list_params: ListParams,
etag: Option<String>,
accept: Option<AcceptHeader>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let TypedCache {
content_type,
cache,
} = TypedCache::<(i32, ListParams), CachedResponse>::pick_cache(
accept,
&CACHES.list_transactions_by_shop_id_bin,
&CACHES.list_transactions_by_shop_id,
);
let response = cache
.get_response((shop_id, list_params.clone()), || async {
let transactions = Transaction::list_by_shop_id(&env.db, shop_id, &list_params).await?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => {
Box::new(ETagReply::<Bincode>::from_serializable(&transactions)?)
}
ContentType::Json => Box::new(ETagReply::<Json>::from_serializable(&transactions)?),
};
let reply = with_status(reply, StatusCode::OK);
Ok(reply)
})
.await?;
Ok(check_etag(etag, response))
}
pub async fn create(
bytes: Bytes,
api_key: Option<Uuid>,
content_type: Option<Mime>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let DeserializedBody {
body: mut transaction,
content_type,
} = DeserializedBody::<PostedTransaction>::from_bytes(bytes, content_type)
.map_err(reject_anyhow)?;
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
transaction.owner_id = Some(owner_id);
let mut tx = env
.db
.begin()
.await
.map_err(|error| reject_anyhow(anyhow!(error)))?;
let saved_transaction = Transaction::create(transaction, &mut tx)
.await
.map_err(reject_anyhow)?;
if !Shop::accepts_keywords(
&mut tx,
saved_transaction.shop_id,
&saved_transaction.keywords,
)
.await
.map_err(reject_anyhow)?
{
return Err(reject::custom(
HttpApiProblem::with_title_and_type_from_status(StatusCode::BAD_REQUEST)
.set_title("Unacceptable Merchandise Type")
.set_detail("Shop does not accept that kind of merchandise"),
));
}
let (quantity_delta, shop_gold_delta) = match saved_transaction.is_sell {
true => (saved_transaction.quantity, saved_transaction.price * -1),
false => (saved_transaction.quantity * -1, saved_transaction.price),
};
let updated_merchandise_list = MerchandiseList::update_merchandise_quantity(
&mut tx,
saved_transaction.shop_id,
&(saved_transaction.mod_name),
saved_transaction.local_form_id,
&(saved_transaction.name),
saved_transaction.form_type,
saved_transaction.is_food,
saved_transaction.price,
quantity_delta,
&saved_transaction.keywords,
)
.await
.map_err(reject_anyhow)?;
Shop::update_gold(&mut tx, saved_transaction.shop_id, shop_gold_delta)
.await
.map_err(reject_anyhow)?;
tx.commit()
.await
.map_err(|error| reject_anyhow(anyhow!(error)))?;
let url = saved_transaction.url(&env.api_url).map_err(reject_anyhow)?;
let reply: Box<dyn Reply> = match content_type {
ContentType::Bincode => Box::new(
ETagReply::<Bincode>::from_serializable(&saved_transaction).map_err(reject_anyhow)?,
),
ContentType::Json => Box::new(
ETagReply::<Json>::from_serializable(&saved_transaction).map_err(reject_anyhow)?,
),
};
let reply = with_header(reply, "Location", url.as_str());
let reply = with_status(reply, StatusCode::CREATED);
tokio::spawn(async move {
// TODO: will this make these caches effectively useless?
CACHES
.merchandise_list
.delete_response(updated_merchandise_list.id)
.await;
CACHES
.merchandise_list_bin
.delete_response(updated_merchandise_list.id)
.await;
CACHES
.merchandise_list_by_shop_id
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES
.merchandise_list_by_shop_id_bin
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES.list_transactions.clear().await;
CACHES.list_transactions_bin.clear().await;
CACHES.list_transactions_by_shop_id.clear().await;
CACHES.list_transactions_by_shop_id_bin.clear().await;
CACHES.list_merchandise_lists.clear().await;
CACHES.list_merchandise_lists_bin.clear().await;
CACHES
.shop
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES
.shop_bin
.delete_response(updated_merchandise_list.shop_id)
.await;
CACHES.list_shops.clear().await;
CACHES.list_shops_bin.clear().await;
});
Ok(reply)
}
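// Hedged aside (not part of the diff): the sign convention used for
// `quantity_delta` and `shop_gold_delta` above, isolated as a pure function so
// the two cases are easy to see (a sale adds stock and costs the shop gold; a
// purchase removes stock and pays the shop).
fn transaction_deltas(is_sell: bool, quantity: i32, price: i32) -> (i32, i32) {
    if is_sell {
        (quantity, -price)
    } else {
        (-quantity, price)
    }
}

#[cfg(test)]
mod delta_tests {
    use super::transaction_deltas;

    #[test]
    fn sell_and_buy_mirror_each_other() {
        assert_eq!(transaction_deltas(true, 2, 100), (2, -100));
        assert_eq!(transaction_deltas(false, 2, 100), (-2, 100));
    }
}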
// Does NOT reverse the transaction side-effects!
pub async fn delete(
id: i32,
api_key: Option<Uuid>,
env: Environment,
) -> Result<impl Reply, Rejection> {
let owner_id = authenticate(&env, api_key).await.map_err(reject_anyhow)?;
Transaction::delete(&env.db, owner_id, id)
.await
.map_err(reject_anyhow)?;
tokio::spawn(async move {
CACHES.transaction.delete_response(id).await;
CACHES.transaction_bin.delete_response(id).await;
CACHES.list_transactions.clear().await;
CACHES.list_transactions_bin.clear().await;
CACHES.list_transactions_by_shop_id.clear().await;
CACHES.list_transactions_by_shop_id_bin.clear().await;
});
Ok(StatusCode::NO_CONTENT)
}

View File

@ -1,100 +1,116 @@
#[macro_use]
extern crate lazy_static;
use anyhow::Result; use anyhow::Result;
use clap::Clap;
use dotenv::dotenv; use dotenv::dotenv;
use http::header::SERVER; use http::StatusCode;
use hyper::{body::Bytes, server::Server}; use hyper::server::Server;
use listenfd::ListenFd; use listenfd::ListenFd;
use sqlx::postgres::PgPoolOptions; use serde::{de::DeserializeOwned, Serialize};
use sqlx::{migrate, Pool, Postgres}; use sqlx::postgres::PgPool;
use std::convert::Infallible; use std::convert::Infallible;
use std::env; use std::env;
use tracing::info;
use tracing_subscriber::fmt::format::FmtSpan; use tracing_subscriber::fmt::format::FmtSpan;
use url::Url; use url::Url;
use warp::http::Response;
use warp::Filter; use warp::Filter;
mod caches; mod caches;
mod db;
mod handlers; mod handlers;
#[macro_use] #[macro_use]
mod macros; mod macros;
mod models; mod models;
mod problem; mod problem;
use handlers::SERVER_STRING; use caches::Caches;
use models::interior_ref_list::PostedInteriorRefList;
use models::merchandise_list::{MerchandiseParams, PostedMerchandiseList};
use models::owner::PostedOwner;
use models::shop::PostedShop;
use models::ListParams; use models::ListParams;
#[derive(Clap)]
#[clap(version = "0.1.0", author = "Tyler Hallada <tyler@hallada.net>")]
struct Opts {
#[clap(short, long)]
migrate: bool,
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Environment { pub struct Environment {
pub db: Pool<Postgres>, pub db: PgPool,
pub caches: Caches,
pub api_url: Url, pub api_url: Url,
} }
impl Environment { impl Environment {
async fn new(api_url: Url) -> Result<Environment> { async fn new(api_url: Url) -> Result<Environment> {
Ok(Environment { Ok(Environment {
db: PgPoolOptions::new() db: PgPool::connect(&env::var("DATABASE_URL")?).await?,
.max_connections(5) caches: Caches::initialize(),
.connect(&env::var("DATABASE_URL")?)
.await?,
api_url, api_url,
}) })
} }
} }
#[derive(Serialize)]
struct ErrorMessage {
code: u16,
message: String,
}
fn with_env(env: Environment) -> impl Filter<Extract = (Environment,), Error = Infallible> + Clone { fn with_env(env: Environment) -> impl Filter<Extract = (Environment,), Error = Infallible> + Clone {
warp::any().map(move || env.clone()) warp::any().map(move || env.clone())
} }
fn extract_body_bytes() -> impl Filter<Extract = (Bytes,), Error = warp::Rejection> + Clone { fn json_body<T>() -> impl Filter<Extract = (T,), Error = warp::Rejection> + Clone
warp::body::content_length_limit(1024 * 1024).and(warp::body::bytes()) where
T: Send + DeserializeOwned,
{
warp::body::content_length_limit(1024 * 64).and(warp::body::json())
} }
#[tokio::main] #[tokio::main]
async fn main() -> Result<()> { async fn main() -> Result<()> {
openssl_probe::init_ssl_cert_env_vars();
dotenv().ok(); dotenv().ok();
let env_log_filter = let env_log_filter =
env::var("RUST_LOG").unwrap_or_else(|_| "warp=info,bazaar_realm_api=info".to_owned()); env::var("RUST_LOG").unwrap_or_else(|_| "warp=info,bazaar_realm_api=info".to_owned());
let (non_blocking_writer, _guard) = tracing_appender::non_blocking(std::io::stdout());
tracing_subscriber::fmt() tracing_subscriber::fmt()
.with_env_filter(env_log_filter) .with_env_filter(env_log_filter)
.with_span_events(FmtSpan::CLOSE) .with_span_events(FmtSpan::CLOSE)
.with_writer(non_blocking_writer)
.init(); .init();
let opts: Opts = Opts::parse();
if opts.migrate {
info!("going to migrate now!");
db::migrate().await;
return Ok(());
}
let host = env::var("HOST").expect("`HOST` environment variable not defined"); let host = env::var("HOST").expect("`HOST` environment variable not defined");
let host_url = Url::parse(&host).expect("Cannot parse URL from `HOST` environment variable"); let host_url = Url::parse(&host).expect("Cannot parse URL from `HOST` environment variable");
let api_url = host_url.join("/v1/")?; let api_url = host_url.join("/v1/")?;
let env = Environment::new(api_url).await?; let env = Environment::new(api_url).await?;
migrate!("db/migrations").run(&env.db).await?;
let status_handler = warp::path::path("status") let status_handler = warp::path::path("status")
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.map(|| Response::builder().header(SERVER, SERVER_STRING).body("Ok")); .map(|| StatusCode::OK); // TODO: return what api versions this server supports instead
let get_owner_handler = warp::path("owners").and( let get_owner_handler = warp::path("owners").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::owner::get), .and_then(handlers::get_owner),
); );
let create_owner_handler = warp::path("owners").and( let create_owner_handler = warp::path("owners").and(
warp::path::end() warp::path::end()
.and(warp::post()) .and(warp::post())
.and(extract_body_bytes()) .and(json_body::<PostedOwner>())
.and(warp::addr::remote()) .and(warp::addr::remote())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("x-real-ip")) .and(warp::header::optional("x-real-ip"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::owner::create), .and_then(handlers::create_owner),
); );
let delete_owner_handler = warp::path("owners").and( let delete_owner_handler = warp::path("owners").and(
warp::path::param() warp::path::param()
@ -102,44 +118,38 @@ async fn main() -> Result<()> {
.and(warp::delete()) .and(warp::delete())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::owner::delete), .and_then(handlers::delete_owner),
); );
let update_owner_handler = warp::path("owners").and( let update_owner_handler = warp::path("owners").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedOwner>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::owner::update), .and_then(handlers::update_owner),
); );
let list_owners_handler = warp::path("owners").and( let list_owners_handler = warp::path("owners").and(
warp::path::end() warp::path::end()
.and(warp::get()) .and(warp::get())
.and(warp::query::<ListParams>()) .and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::owner::list), .and_then(handlers::list_owners),
); );
let get_shop_handler = warp::path("shops").and( let get_shop_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::shop::get), .and_then(handlers::get_shop),
); );
let create_shop_handler = warp::path("shops").and( let create_shop_handler = warp::path("shops").and(
warp::path::end() warp::path::end()
.and(warp::post()) .and(warp::post())
.and(extract_body_bytes()) .and(json_body::<PostedShop>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::shop::create), .and_then(handlers::create_shop),
); );
let delete_shop_handler = warp::path("shops").and( let delete_shop_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
@ -147,44 +157,38 @@ async fn main() -> Result<()> {
.and(warp::delete()) .and(warp::delete())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::shop::delete), .and_then(handlers::delete_shop),
); );
let update_shop_handler = warp::path("shops").and( let update_shop_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedShop>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::shop::update), .and_then(handlers::update_shop),
); );
let list_shops_handler = warp::path("shops").and( let list_shops_handler = warp::path("shops").and(
warp::path::end() warp::path::end()
.and(warp::get()) .and(warp::get())
.and(warp::query::<ListParams>()) .and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::shop::list), .and_then(handlers::list_shops),
); );
let get_interior_ref_list_handler = warp::path("interior_ref_lists").and( let get_interior_ref_list_handler = warp::path("interior_ref_lists").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::get), .and_then(handlers::get_interior_ref_list),
); );
let create_interior_ref_list_handler = warp::path("interior_ref_lists").and( let create_interior_ref_list_handler = warp::path("interior_ref_lists").and(
warp::path::end() warp::path::end()
.and(warp::post()) .and(warp::post())
.and(extract_body_bytes()) .and(json_body::<PostedInteriorRefList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::create), .and_then(handlers::create_interior_ref_list),
); );
let delete_interior_ref_list_handler = warp::path("interior_ref_lists").and( let delete_interior_ref_list_handler = warp::path("interior_ref_lists").and(
warp::path::param() warp::path::param()
@ -192,65 +196,56 @@ async fn main() -> Result<()> {
.and(warp::delete()) .and(warp::delete())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::delete), .and_then(handlers::delete_interior_ref_list),
); );
let update_interior_ref_list_handler = warp::path("interior_ref_lists").and( let update_interior_ref_list_handler = warp::path("interior_ref_lists").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedInteriorRefList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::update), .and_then(handlers::update_interior_ref_list),
); );
let update_interior_ref_list_by_shop_id_handler = warp::path("shops").and( let update_interior_ref_list_by_shop_id_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path("interior_ref_list")) .and(warp::path("interior_ref_list"))
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedInteriorRefList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::update_by_shop_id), .and_then(handlers::update_interior_ref_list_by_shop_id),
); );
let list_interior_ref_lists_handler = warp::path("interior_ref_lists").and( let list_interior_ref_lists_handler = warp::path("interior_ref_lists").and(
warp::path::end() warp::path::end()
.and(warp::get()) .and(warp::get())
.and(warp::query::<ListParams>()) .and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::list), .and_then(handlers::list_interior_ref_lists),
); );
let get_interior_ref_list_by_shop_id_handler = warp::path("shops").and( let get_interior_ref_list_by_shop_id_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path("interior_ref_list")) .and(warp::path("interior_ref_list"))
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::interior_ref_list::get_by_shop_id), .and_then(handlers::get_interior_ref_list_by_shop_id),
); );
let get_merchandise_list_handler = warp::path("merchandise_lists").and( let get_merchandise_list_handler = warp::path("merchandise_lists").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::get), .and_then(handlers::get_merchandise_list),
); );
let create_merchandise_list_handler = warp::path("merchandise_lists").and( let create_merchandise_list_handler = warp::path("merchandise_lists").and(
warp::path::end() warp::path::end()
.and(warp::post()) .and(warp::post())
.and(extract_body_bytes()) .and(json_body::<PostedMerchandiseList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::create), .and_then(handlers::create_merchandise_list),
); );
let delete_merchandise_list_handler = warp::path("merchandise_lists").and( let delete_merchandise_list_handler = warp::path("merchandise_lists").and(
warp::path::param() warp::path::param()
@ -258,93 +253,51 @@ async fn main() -> Result<()> {
.and(warp::delete()) .and(warp::delete())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::delete), .and_then(handlers::delete_merchandise_list),
); );
let update_merchandise_list_handler = warp::path("merchandise_lists").and( let update_merchandise_list_handler = warp::path("merchandise_lists").and(
warp::path::param() warp::path::param()
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedMerchandiseList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::update), .and_then(handlers::update_merchandise_list),
); );
let update_merchandise_list_by_shop_id_handler = warp::path("shops").and( let update_merchandise_list_by_shop_id_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path("merchandise_list")) .and(warp::path("merchandise_list"))
.and(warp::path::end()) .and(warp::path::end())
.and(warp::patch()) .and(warp::patch())
.and(extract_body_bytes()) .and(json_body::<PostedMerchandiseList>())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::update_by_shop_id), .and_then(handlers::update_merchandise_list_by_shop_id),
); );
let list_merchandise_lists_handler = warp::path("merchandise_lists").and( let list_merchandise_lists_handler = warp::path("merchandise_lists").and(
warp::path::end() warp::path::end()
.and(warp::get()) .and(warp::get())
.and(warp::query::<ListParams>()) .and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::list), .and_then(handlers::list_merchandise_lists),
); );
let get_merchandise_list_by_shop_id_handler = warp::path("shops").and( let get_merchandise_list_by_shop_id_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path("merchandise_list")) .and(warp::path("merchandise_list"))
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get()) .and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::merchandise_list::get_by_shop_id), .and_then(handlers::get_merchandise_list_by_shop_id),
); );
let get_transaction_handler = warp::path("transactions").and( let buy_merchandise_handler = warp::path("shops").and(
warp::path::param() warp::path::param()
.and(warp::path("merchandise_list"))
.and(warp::path::end()) .and(warp::path::end())
.and(warp::get())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone()))
.and_then(handlers::transaction::get),
);
let create_transaction_handler = warp::path("transactions").and(
warp::path::end()
.and(warp::post()) .and(warp::post())
.and(extract_body_bytes()) .and(warp::query::<MerchandiseParams>())
.and(warp::header::optional("api-key"))
.and(warp::header::optional("content-type"))
.and(with_env(env.clone()))
.and_then(handlers::transaction::create),
);
let delete_transaction_handler = warp::path("transactions").and(
warp::path::param()
.and(warp::path::end())
.and(warp::delete())
.and(warp::header::optional("api-key")) .and(warp::header::optional("api-key"))
.and(with_env(env.clone())) .and(with_env(env.clone()))
.and_then(handlers::transaction::delete), .and_then(handlers::buy_merchandise),
);
let list_transactions_handler = warp::path("transactions").and(
warp::path::end()
.and(warp::get())
.and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone()))
.and_then(handlers::transaction::list),
);
let list_transactions_by_shop_id_handler = warp::path("shops").and(
warp::path::param()
.and(warp::path("transactions"))
.and(warp::path::end())
.and(warp::get())
.and(warp::query::<ListParams>())
.and(warp::header::optional("if-none-match"))
.and(warp::header::optional("accept"))
.and(with_env(env.clone()))
.and_then(handlers::transaction::list_by_shop_id),
); );
let routes = warp::path("v1") let routes = warp::path("v1")
@ -364,7 +317,7 @@ async fn main() -> Result<()> {
get_merchandise_list_by_shop_id_handler, get_merchandise_list_by_shop_id_handler,
update_interior_ref_list_by_shop_id_handler, update_interior_ref_list_by_shop_id_handler,
update_merchandise_list_by_shop_id_handler, update_merchandise_list_by_shop_id_handler,
list_transactions_by_shop_id_handler, buy_merchandise_handler,
get_interior_ref_list_handler, get_interior_ref_list_handler,
delete_interior_ref_list_handler, delete_interior_ref_list_handler,
update_interior_ref_list_handler, update_interior_ref_list_handler,
@ -375,31 +328,12 @@ async fn main() -> Result<()> {
update_merchandise_list_handler, update_merchandise_list_handler,
create_merchandise_list_handler, create_merchandise_list_handler,
list_merchandise_lists_handler, list_merchandise_lists_handler,
get_transaction_handler,
delete_transaction_handler,
create_transaction_handler,
list_transactions_handler,
// warp::any().map(|| StatusCode::NOT_FOUND), // warp::any().map(|| StatusCode::NOT_FOUND),
)) ))
.recover(problem::unpack_problem) .recover(problem::unpack_problem)
.with(warp::compression::gzip()) .with(warp::compression::gzip())
.with(warp::trace::request()); .with(warp::trace::request());
if let Ok(tls_cert) = env::var("TLS_CERT") {
if let Ok(tls_key) = env::var("TLS_KEY") {
let port = env::var("PORT")
.unwrap_or_else(|_| "443".to_owned())
.parse()?;
warp::serve(routes)
.tls()
.cert_path(tls_cert)
.key_path(tls_key)
.run(([0, 0, 0, 0], port))
.await;
return Ok(());
}
}
let svc = warp::service(routes); let svc = warp::service(routes);
let make_svc = hyper::service::make_service_fn(|_: _| { let make_svc = hyper::service::make_service_fn(|_: _| {
let svc = svc.clone(); let svc = svc.clone();
@ -410,12 +344,10 @@ async fn main() -> Result<()> {
let server = if let Some(l) = listenfd.take_tcp_listener(0)? { let server = if let Some(l) = listenfd.take_tcp_listener(0)? {
Server::from_tcp(l)? Server::from_tcp(l)?
} else { } else {
let port = env::var("PORT") Server::bind(&([127, 0, 0, 1], 3030).into())
.unwrap_or_else(|_| "3030".to_owned())
.parse()?;
Server::bind(&([0, 0, 0, 0], port).into())
}; };
// warp::serve(routes).run(([127, 0, 0, 1], 3030)).await;
server.serve(make_svc).await?; server.serve(make_svc).await?;
Ok(()) Ok(())
} }

View File

@ -1,20 +1,26 @@
use anyhow::{Error, Result}; use anyhow::{Error, Result};
use async_trait::async_trait;
use chrono::prelude::*; use chrono::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::postgres::PgPool;
use sqlx::types::Json; use sqlx::types::Json;
use sqlx::{Done, Executor, Postgres};
use tracing::instrument; use tracing::instrument;
use url::Url;
use super::ListParams; use super::ListParams;
use super::{Model, PostedModel, UpdateableModel};
use crate::problem::forbidden_permission; use crate::problem::forbidden_permission;
#[derive(sqlx::FromRow, Debug, Serialize, Deserialize, Clone)] // sqlx queries for this model need to be `query_as_unchecked!` because `query_as!` does not
// support user-defined types (`ref_list` Json field).
// See for more info: https://github.com/thallada/rust_sqlx_bug/blob/master/src/main.rs
// This may be fixed in sqlx 0.4.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct InteriorRef { pub struct InteriorRef {
pub base_mod_name: String, pub base_mod_name: String,
pub base_local_form_id: u32, pub base_local_form_id: i32,
pub ref_mod_name: Option<String>, pub ref_mod_name: Option<String>,
pub ref_local_form_id: u32, pub ref_local_form_id: i32,
pub position_x: f32, pub position_x: f32,
pub position_y: f32, pub position_y: f32,
pub position_z: f32, pub position_z: f32,
@ -24,31 +30,12 @@ pub struct InteriorRef {
pub scale: u16, pub scale: u16,
} }
#[derive(sqlx::FromRow, Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Shelf {
pub shelf_type: u32,
pub position_x: f32,
pub position_y: f32,
pub position_z: f32,
pub angle_x: f32,
pub angle_y: f32,
pub angle_z: f32,
pub scale: u16,
pub page: u32,
pub filter_form_type: Option<u32>,
pub filter_is_food: bool,
pub search: Option<String>,
pub sort_on: Option<String>,
pub sort_asc: bool,
}
#[derive(sqlx::FromRow, Debug, Serialize, Deserialize, Clone)]
pub struct InteriorRefList { pub struct InteriorRefList {
pub id: i32, pub id: i32,
pub shop_id: i32, pub shop_id: i32,
pub owner_id: i32, pub owner_id: i32,
pub ref_list: Json<Vec<InteriorRef>>, pub ref_list: Json<Vec<InteriorRef>>,
pub shelves: Json<Vec<Shelf>>,
pub created_at: NaiveDateTime, pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime, pub updated_at: NaiveDateTime,
} }
@ -58,66 +45,51 @@ pub struct PostedInteriorRefList {
pub shop_id: i32, pub shop_id: i32,
pub owner_id: Option<i32>, pub owner_id: Option<i32>,
pub ref_list: Json<Vec<InteriorRef>>, pub ref_list: Json<Vec<InteriorRef>>,
pub shelves: Json<Vec<Shelf>>,
} }
impl InteriorRefList { impl PostedModel for PostedInteriorRefList {}
pub fn resource_name() -> &'static str {
#[async_trait]
impl Model for InteriorRefList {
fn resource_name() -> &'static str {
"interior_ref_list" "interior_ref_list"
} }
pub fn pk(&self) -> i32 { fn pk(&self) -> i32 {
self.id self.id
} }
pub fn url(&self, api_url: &Url) -> Result<Url> {
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
}
    // TODO: this model will probably never need to be accessed through its ID, should these methods be removed/unimplemented?
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn get(db: impl Executor<'_, Database = Postgres>, id: i32) -> Result<Self> { async fn get(db: &PgPool, id: i32) -> Result<Self> {
sqlx::query_as!( sqlx::query_as_unchecked!(Self, "SELECT * FROM interior_ref_lists WHERE id = $1", id)
Self, .fetch_one(db)
r#"SELECT id, shop_id, owner_id, created_at, updated_at, .await
ref_list as "ref_list: Json<Vec<InteriorRef>>", .map_err(Error::new)
shelves as "shelves: Json<Vec<Shelf>>"
FROM interior_ref_lists WHERE id = $1"#,
id
)
.fetch_one(db)
.await
.map_err(Error::new)
} }
#[instrument(level = "debug", skip(interior_ref_list, db))] #[instrument(level = "debug", skip(posted, db))]
pub async fn create( async fn create(posted: PostedInteriorRefList, db: &PgPool) -> Result<Self> {
interior_ref_list: PostedInteriorRefList, // TODO:
db: impl Executor<'_, Database = Postgres>, // * Decide if I'll need to make the same changes to merchandise and transactions
) -> Result<Self> { // - answer depends on how many rows of each I expect to insert in one go
Ok(sqlx::query_as!( // * should probably omit ref_list from response
Ok(sqlx::query_as_unchecked!(
Self, Self,
r#"INSERT INTO interior_ref_lists "INSERT INTO interior_ref_lists
(shop_id, owner_id, ref_list, shelves, created_at, updated_at) (shop_id, owner_id, ref_list, created_at, updated_at)
VALUES ($1, $2, $3, $4, now(), now()) VALUES ($1, $2, $3, now(), now())
RETURNING id, shop_id, owner_id, created_at, updated_at, RETURNING *",
ref_list as "ref_list: Json<Vec<InteriorRef>>", posted.shop_id,
shelves as "shelves: Json<Vec<Shelf>>""#, posted.owner_id,
interior_ref_list.shop_id, posted.ref_list,
interior_ref_list.owner_id,
serde_json::json!(interior_ref_list.ref_list),
serde_json::json!(interior_ref_list.shelves),
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn delete( async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64> {
db: impl Executor<'_, Database = Postgres> + Copy,
owner_id: i32,
id: i32,
) -> Result<u64> {
let interior_ref_list = let interior_ref_list =
sqlx::query!("SELECT owner_id FROM interior_ref_lists WHERE id = $1", id) sqlx::query!("SELECT owner_id FROM interior_ref_lists WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
@ -126,8 +98,7 @@ impl InteriorRefList {
return Ok( return Ok(
sqlx::query!("DELETE FROM interior_ref_lists WHERE id = $1", id) sqlx::query!("DELETE FROM interior_ref_lists WHERE id = $1", id)
.execute(db) .execute(db)
.await? .await?,
.rows_affected(),
); );
} else { } else {
return Err(forbidden_permission()); return Err(forbidden_permission());
@ -135,19 +106,14 @@ impl InteriorRefList {
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn list( async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>> {
db: impl Executor<'_, Database = Postgres>,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() { let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!( sqlx::query_as_unchecked!(
Self, Self,
r#"SELECT id, shop_id, owner_id, created_at, updated_at, "SELECT * FROM interior_ref_lists
ref_list as "ref_list: Json<Vec<InteriorRef>>",
shelves as "shelves: Json<Vec<Shelf>>" FROM interior_ref_lists
ORDER BY $1 ORDER BY $1
LIMIT $2 LIMIT $2
OFFSET $3"#, OFFSET $3",
order_by, order_by,
list_params.limit.unwrap_or(10), list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0), list_params.offset.unwrap_or(0),
@ -155,13 +121,11 @@ impl InteriorRefList {
.fetch_all(db) .fetch_all(db)
.await? .await?
} else { } else {
sqlx::query_as!( sqlx::query_as_unchecked!(
Self, Self,
r#"SELECT id, shop_id, owner_id, created_at, updated_at, "SELECT * FROM interior_ref_lists
ref_list as "ref_list: Json<Vec<InteriorRef>>",
shelves as "shelves: Json<Vec<Shelf>>" FROM interior_ref_lists
LIMIT $1 LIMIT $1
OFFSET $2"#, OFFSET $2",
list_params.limit.unwrap_or(10), list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0), list_params.offset.unwrap_or(0),
) )
@ -170,85 +134,77 @@ impl InteriorRefList {
}; };
Ok(result) Ok(result)
} }
}
#[instrument(level = "debug", skip(interior_ref_list, db))] #[async_trait]
pub async fn update( impl UpdateableModel for InteriorRefList {
interior_ref_list: PostedInteriorRefList, #[instrument(level = "debug", skip(posted, db))]
db: impl Executor<'_, Database = Postgres> + Copy, async fn update(
posted: PostedInteriorRefList,
db: &PgPool,
owner_id: i32, owner_id: i32,
id: i32, id: i32,
) -> Result<Self> { ) -> Result<Self> {
let existing_interior_ref_list = let interior_ref_list =
sqlx::query!("SELECT owner_id FROM interior_ref_lists WHERE id = $1", id) sqlx::query!("SELECT owner_id FROM interior_ref_lists WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if existing_interior_ref_list.owner_id == owner_id { if interior_ref_list.owner_id == owner_id {
Ok(sqlx::query_as!( Ok(sqlx::query_as_unchecked!(
Self, Self,
r#"UPDATE interior_ref_lists SET "UPDATE interior_ref_lists SET
ref_list = $2, ref_list = $2,
shelves = $3,
updated_at = now() updated_at = now()
WHERE id = $1 WHERE id = $1
RETURNING id, shop_id, owner_id, created_at, updated_at, RETURNING *",
ref_list as "ref_list: Json<Vec<InteriorRef>>",
shelves as "shelves: Json<Vec<Shelf>>""#,
id, id,
serde_json::json!(interior_ref_list.ref_list), posted.ref_list,
serde_json::json!(interior_ref_list.shelves), )
) .fetch_one(db)
.fetch_one(db) .await?)
.await?) } else {
} else { return Err(forbidden_permission());
return Err(forbidden_permission()); }
} }
} }
#[instrument(level = "debug", skip(db))] impl InteriorRefList {
pub async fn get_by_shop_id( #[instrument(level = "debug", skip(db))]
db: impl Executor<'_, Database = Postgres>, pub async fn get_by_shop_id(db: &PgPool, shop_id: i32) -> Result<Self> {
shop_id: i32, sqlx::query_as_unchecked!(
) -> Result<Self> { Self,
sqlx::query_as!( "SELECT * FROM interior_ref_lists
Self, WHERE shop_id = $1",
r#"SELECT id, shop_id, owner_id, created_at, updated_at, shop_id,
ref_list as "ref_list: Json<Vec<InteriorRef>>", )
shelves as "shelves: Json<Vec<Shelf>>" FROM interior_ref_lists .fetch_one(db)
WHERE shop_id = $1"#, .await
shop_id, .map_err(Error::new)
) }
.fetch_one(db)
.await #[instrument(level = "debug", skip(posted, db))]
.map_err(Error::new) pub async fn update_by_shop_id(
} posted: PostedInteriorRefList,
db: &PgPool,
#[instrument(level = "debug", skip(interior_ref_list, db))] owner_id: i32,
pub async fn update_by_shop_id( shop_id: i32,
interior_ref_list: PostedInteriorRefList, ) -> Result<Self> {
db: impl Executor<'_, Database = Postgres> + Copy, let interior_ref_list = sqlx::query!(
owner_id: i32, "SELECT owner_id FROM interior_ref_lists WHERE shop_id = $1",
shop_id: i32, shop_id
) -> Result<Self> { )
let existing_interior_ref_list = sqlx::query!( .fetch_one(db)
"SELECT owner_id FROM interior_ref_lists WHERE shop_id = $1", .await?;
shop_id if interior_ref_list.owner_id == owner_id {
) Ok(sqlx::query_as_unchecked!(
.fetch_one(db) Self,
.await?; "UPDATE interior_ref_lists SET
if existing_interior_ref_list.owner_id == owner_id { ref_list = $2,
Ok(sqlx::query_as!( updated_at = now()
Self, WHERE shop_id = $1
r#"UPDATE interior_ref_lists SET RETURNING *",
ref_list = $2, shop_id,
shelves = $3, posted.ref_list,
updated_at = now()
WHERE shop_id = $1
RETURNING id, shop_id, owner_id, created_at, updated_at,
ref_list as "ref_list: Json<Vec<InteriorRef>>",
shelves as "shelves: Json<Vec<Shelf>>""#,
shop_id,
serde_json::json!(interior_ref_list.ref_list),
serde_json::json!(interior_ref_list.shelves),
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)


@ -1,27 +1,30 @@
use anyhow::{anyhow, Error, Result}; use anyhow::{Error, Result};
use async_trait::async_trait;
use chrono::prelude::*; use chrono::prelude::*;
use http::StatusCode;
use http_api_problem::HttpApiProblem;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use serde_json::json; use serde_json::json;
use sqlx::postgres::PgPool;
use sqlx::types::Json; use sqlx::types::Json;
use sqlx::{Done, Executor, Postgres};
use tracing::instrument; use tracing::instrument;
use url::Url;
use super::ListParams; use super::ListParams;
use super::{Model, PostedModel, UpdateableModel};
use crate::problem::forbidden_permission; use crate::problem::forbidden_permission;
// sqlx queries for this model need to be `query_as_unchecked!` because `query_as!` does not
// support user-defined types (`form_list` Json field).
// See for more info: https://github.com/thallada/rust_sqlx_bug/blob/master/src/main.rs
// This may be fixed in sqlx 0.4.
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Merchandise { pub struct Merchandise {
pub mod_name: String, pub mod_name: String,
pub local_form_id: u32, pub local_form_id: i32,
pub name: String, pub name: String,
pub quantity: u32, pub quantity: i32,
pub form_type: u32, pub form_type: i32,
pub is_food: bool, pub is_food: bool,
pub price: u32, pub price: i32,
pub keywords: Vec<String>,
} }
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
@ -41,61 +44,52 @@ pub struct PostedMerchandiseList {
pub form_list: Json<Vec<Merchandise>>, pub form_list: Json<Vec<Merchandise>>,
} }
impl MerchandiseList { impl PostedModel for PostedMerchandiseList {}
pub fn resource_name() -> &'static str {
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct MerchandiseParams {
pub mod_name: String,
pub local_form_id: i32,
pub quantity_delta: i32,
}
#[async_trait]
impl Model for MerchandiseList {
fn resource_name() -> &'static str {
"merchandise_list" "merchandise_list"
} }
pub fn pk(&self) -> i32 { fn pk(&self) -> i32 {
self.id self.id
} }
pub fn url(&self, api_url: &Url) -> Result<Url> {
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
}
// TODO: this model will probably never need to be accessed through its ID, should these methods be removed/unimplemented? // TODO: this model will probably never need to be accessed through its ID, should these methods be removed/unimplemented?
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn get(db: impl Executor<'_, Database = Postgres>, id: i32) -> Result<Self> { async fn get(db: &PgPool, id: i32) -> Result<Self> {
sqlx::query_as!( sqlx::query_as_unchecked!(Self, "SELECT * FROM merchandise_lists WHERE id = $1", id)
Self, .fetch_one(db)
r#"SELECT id, shop_id, owner_id, created_at, updated_at, .await
form_list as "form_list: Json<Vec<Merchandise>>" .map_err(Error::new)
FROM merchandise_lists
WHERE id = $1"#,
id,
)
.fetch_one(db)
.await
.map_err(Error::new)
} }
#[instrument(level = "debug", skip(merchandise_list, db))] #[instrument(level = "debug", skip(posted, db))]
pub async fn create( async fn create(posted: PostedMerchandiseList, db: &PgPool) -> Result<Self> {
merchandise_list: PostedMerchandiseList, Ok(sqlx::query_as_unchecked!(
db: impl Executor<'_, Database = Postgres>,
) -> Result<Self> {
Ok(sqlx::query_as!(
Self, Self,
r#"INSERT INTO merchandise_lists "INSERT INTO merchandise_lists
(shop_id, owner_id, form_list, created_at, updated_at) (shop_id, owner_id, form_list, created_at, updated_at)
VALUES ($1, $2, $3, now(), now()) VALUES ($1, $2, $3, now(), now())
RETURNING id, shop_id, owner_id, created_at, updated_at, RETURNING *",
form_list as "form_list: Json<Vec<Merchandise>>""#, posted.shop_id,
merchandise_list.shop_id, posted.owner_id,
merchandise_list.owner_id, posted.form_list,
serde_json::json!(merchandise_list.form_list),
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn delete( async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64> {
db: impl Executor<'_, Database = Postgres> + Copy,
owner_id: i32,
id: i32,
) -> Result<u64> {
let merchandise_list = let merchandise_list =
sqlx::query!("SELECT owner_id FROM merchandise_lists WHERE id = $1", id) sqlx::query!("SELECT owner_id FROM merchandise_lists WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
@ -104,8 +98,7 @@ impl MerchandiseList {
return Ok( return Ok(
sqlx::query!("DELETE FROM merchandise_lists WHERE id = $1", id) sqlx::query!("DELETE FROM merchandise_lists WHERE id = $1", id)
.execute(db) .execute(db)
.await? .await?,
.rows_affected(),
); );
} else { } else {
return Err(forbidden_permission()); return Err(forbidden_permission());
@ -113,19 +106,14 @@ impl MerchandiseList {
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn list( async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>> {
db: impl Executor<'_, Database = Postgres>,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() { let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!( sqlx::query_as_unchecked!(
Self, Self,
r#"SELECT id, shop_id, owner_id, created_at, updated_at, "SELECT * FROM merchandise_lists
form_list as "form_list: Json<Vec<Merchandise>>"
FROM merchandise_lists
ORDER BY $1 ORDER BY $1
LIMIT $2 LIMIT $2
OFFSET $3"#, OFFSET $3",
order_by, order_by,
list_params.limit.unwrap_or(10), list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0), list_params.offset.unwrap_or(0),
@ -133,13 +121,11 @@ impl MerchandiseList {
.fetch_all(db) .fetch_all(db)
.await? .await?
} else { } else {
sqlx::query_as!( sqlx::query_as_unchecked!(
Self, Self,
r#"SELECT id, shop_id, owner_id, created_at, updated_at, "SELECT * FROM merchandise_lists
form_list as "form_list: Json<Vec<Merchandise>>"
FROM merchandise_lists
LIMIT $1 LIMIT $1
OFFSET $2"#, OFFSET $2",
list_params.limit.unwrap_or(10), list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0), list_params.offset.unwrap_or(0),
) )
@ -148,29 +134,31 @@ impl MerchandiseList {
}; };
Ok(result) Ok(result)
} }
}
#[instrument(level = "debug", skip(merchandise_list, db))] #[async_trait]
pub async fn update( impl UpdateableModel for MerchandiseList {
merchandise_list: PostedMerchandiseList, #[instrument(level = "debug", skip(posted, db))]
db: impl Executor<'_, Database = Postgres> + Copy, async fn update(
posted: PostedMerchandiseList,
db: &PgPool,
owner_id: i32, owner_id: i32,
id: i32, id: i32,
) -> Result<Self> { ) -> Result<Self> {
let existing_merchandise_list = let merchandise_list =
sqlx::query!("SELECT owner_id FROM merchandise_lists WHERE id = $1", id) sqlx::query!("SELECT owner_id FROM merchandise_lists WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if existing_merchandise_list.owner_id == owner_id { if merchandise_list.owner_id == owner_id {
Ok(sqlx::query_as!( Ok(sqlx::query_as_unchecked!(
Self, Self,
r#"UPDATE merchandise_lists SET "UPDATE merchandise_lists SET
form_list = $2, form_list = $2,
updated_at = now() updated_at = now()
WHERE id = $1 WHERE id = $1
RETURNING id, shop_id, owner_id, created_at, updated_at, RETURNING *",
form_list as "form_list: Json<Vec<Merchandise>>""#,
id, id,
serde_json::json!(merchandise_list.form_list), posted.form_list,
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
@ -178,18 +166,15 @@ impl MerchandiseList {
return Err(forbidden_permission()); return Err(forbidden_permission());
} }
} }
}
impl MerchandiseList {
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn get_by_shop_id( pub async fn get_by_shop_id(db: &PgPool, shop_id: i32) -> Result<Self> {
db: impl Executor<'_, Database = Postgres>, sqlx::query_as_unchecked!(
shop_id: i32,
) -> Result<Self> {
sqlx::query_as!(
Self, Self,
r#"SELECT id, shop_id, owner_id, created_at, updated_at, "SELECT * FROM merchandise_lists
form_list as "form_list: Json<Vec<Merchandise>>" WHERE shop_id = $1",
FROM merchandise_lists
WHERE shop_id = $1"#,
shop_id, shop_id,
) )
.fetch_one(db) .fetch_one(db)
@ -197,30 +182,29 @@ impl MerchandiseList {
.map_err(Error::new) .map_err(Error::new)
} }
#[instrument(level = "debug", skip(merchandise_list, db))] #[instrument(level = "debug", skip(posted, db))]
pub async fn update_by_shop_id( pub async fn update_by_shop_id(
merchandise_list: PostedMerchandiseList, posted: PostedMerchandiseList,
db: impl Executor<'_, Database = Postgres> + Copy, db: &PgPool,
owner_id: i32, owner_id: i32,
shop_id: i32, shop_id: i32,
) -> Result<Self> { ) -> Result<Self> {
let existing_merchandise_list = sqlx::query!( let merchandise_list = sqlx::query!(
"SELECT owner_id FROM merchandise_lists WHERE shop_id = $1", "SELECT owner_id FROM merchandise_lists WHERE shop_id = $1",
shop_id shop_id
) )
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if existing_merchandise_list.owner_id == owner_id { if merchandise_list.owner_id == owner_id {
Ok(sqlx::query_as!( Ok(sqlx::query_as_unchecked!(
Self, Self,
r#"UPDATE merchandise_lists SET "UPDATE merchandise_lists SET
form_list = $2, form_list = $2,
updated_at = now() updated_at = now()
WHERE shop_id = $1 WHERE shop_id = $1
RETURNING id, shop_id, owner_id, created_at, updated_at, RETURNING *",
form_list as "form_list: Json<Vec<Merchandise>>""#,
shop_id, shop_id,
serde_json::json!(merchandise_list.form_list), posted.form_list,
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
@ -231,46 +215,24 @@ impl MerchandiseList {
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn update_merchandise_quantity( pub async fn update_merchandise_quantity(
db: impl Executor<'_, Database = Postgres>, db: &PgPool,
shop_id: i32, shop_id: i32,
mod_name: &str, mod_name: &str,
local_form_id: i32, local_form_id: i32,
name: &str,
form_type: i32,
is_food: bool,
price: i32,
quantity_delta: i32, quantity_delta: i32,
keywords: &[String],
) -> Result<Self> { ) -> Result<Self> {
let add_item = json!([{ Ok(sqlx::query_as_unchecked!(
"mod_name": mod_name,
"local_form_id": local_form_id,
"name": name,
"quantity": quantity_delta,
"form_type": form_type,
"is_food": is_food,
"price": price,
"keywords": keywords,
}]);
Ok(sqlx::query_as!(
Self, Self,
r#"UPDATE "UPDATE
merchandise_lists merchandise_lists
SET SET
form_list = CASE form_list =
WHEN elem_index IS NULL AND quantity IS NULL AND $4 > 0 jsonb_set(
THEN form_list || $5 form_list,
WHEN elem_index IS NOT NULL AND quantity IS NOT NULL AND quantity::int + $4 = 0 array[elem_index::text, 'quantity'],
THEN form_list - elem_index::int to_jsonb(quantity::int + $4),
WHEN elem_index IS NOT NULL AND quantity IS NOT NULL true
THEN jsonb_set( )
form_list,
array[elem_index::text, 'quantity'],
to_jsonb(quantity::int + $4),
true
)
ELSE NULL
END
FROM ( FROM (
SELECT SELECT
pos - 1 as elem_index, pos - 1 as elem_index,
@ -282,44 +244,16 @@ impl MerchandiseList {
shop_id = $1 AND shop_id = $1 AND
elem->>'mod_name' = $2::text AND elem->>'mod_name' = $2::text AND
elem->>'local_form_id' = $3::text elem->>'local_form_id' = $3::text
UNION ALL
SELECT
NULL as elem_index, NULL as quantity
LIMIT 1
) sub ) sub
WHERE WHERE
shop_id = $1 shop_id = $1
RETURNING RETURNING merchandise_lists.*",
merchandise_lists.id,
merchandise_lists.shop_id,
merchandise_lists.owner_id,
merchandise_lists.created_at,
merchandise_lists.updated_at,
merchandise_lists.form_list as "form_list: Json<Vec<Merchandise>>""#,
shop_id, shop_id,
mod_name, mod_name,
&local_form_id.to_string(), local_form_id,
quantity_delta, quantity_delta,
add_item,
) )
.fetch_one(db) .fetch_one(db)
.await .await?)
.map_err(|error| {
let anyhow_error = anyhow!(error);
if let Some(db_error) =
anyhow_error.downcast_ref::<sqlx::postgres::PgDatabaseError>()
{
if db_error.code() == "23502" && db_error.column() == Some("form_list") {
return anyhow!(HttpApiProblem::with_title_and_type_from_status(
StatusCode::NOT_FOUND
)
.set_detail(format!(
"Cannot find merchandise to buy with mod_name: {} and local_form_id: {:#010X}",
mod_name, local_form_id
)));
}
}
anyhow_error
})?)
} }
} }
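
The fuller `update_merchandise_quantity` above folds three outcomes into a single UPDATE through the CASE over `elem_index` and `quantity`. A serde_json illustration of the intended before/after `form_list` shapes (a sketch only; real entries carry every `Merchandise` field):

    use serde_json::json;

    fn quantity_update_outcomes() {
        // 1. Entry exists and quantity + delta > 0: jsonb_set bumps the quantity in place.
        let before = json!([{ "mod_name": "Skyrim.esm", "local_form_id": 5, "quantity": 2 }]);
        let after = json!([{ "mod_name": "Skyrim.esm", "local_form_id": 5, "quantity": 3 }]);
        println!("{} -> {}", before, after);

        // 2. Entry exists and quantity + delta == 0: `form_list - elem_index::int` drops the
        //    element entirely, leaving the rest of the list untouched.
        // 3. No matching entry and delta > 0: `form_list || $5` appends the posted item
        //    (the `add_item` JSON built just above the query).
        // Any other combination yields NULL, which trips the NOT NULL constraint and is
        // mapped to a 404 problem in the error-handling closure that follows the query.
    }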


@ -7,14 +7,12 @@ pub mod merchandise_list;
pub mod model; pub mod model;
pub mod owner; pub mod owner;
pub mod shop; pub mod shop;
pub mod transaction;
pub use interior_ref_list::{InteriorRefList, PostedInteriorRefList}; pub use interior_ref_list::InteriorRefList;
pub use merchandise_list::{MerchandiseList, PostedMerchandiseList}; pub use merchandise_list::{MerchandiseList, MerchandiseParams};
pub use model::{Model, UpdateableModel}; pub use model::{Model, PostedModel, UpdateableModel};
pub use owner::{FullPostedOwner, Owner, PostedOwner}; pub use owner::Owner;
pub use shop::{PostedShop, Shop}; pub use shop::Shop;
pub use transaction::{PostedTransaction, Transaction};
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)] #[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub enum Order { pub enum Order {


@ -5,33 +5,20 @@ use url::Url;
use super::ListParams; use super::ListParams;
// TODO: I stopped using this because I needed to accept a transaction instead of a &PgPool for these methods on certain models. pub trait PostedModel {}
// It would be nice to find a way to impl this trait for all my models so I don't have to keep redoing the `url` function on
// each. But, maybe I'm trying to use Traits in an OOP way and that's bad, idk.
//
// @NyxCode on discord: "on 0.4, you can use impl Executor<'_, Database = Postgres>. I use it everywhere, and it works for
// &PgPool, &mut PgConnection and &mut Transaction"
//
// I attempted to use `impl Executor<Database = Postgres>` in 0.3.5 but it created a recursive type error :(
#[async_trait] #[async_trait]
pub trait Model pub trait Model
where where
Self: std::marker::Sized, Self: std::marker::Sized,
{ {
fn resource_name() -> &'static str; fn resource_name() -> &'static str;
fn pk(&self) -> Option<i32>; fn pk(&self) -> i32;
fn url(&self, api_url: &Url) -> Result<Url> { fn url(&self, api_url: &Url) -> Result<Url> {
if let Some(pk) = self.pk() { Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), pk))?)
} else {
Err(anyhow!(
"Cannot get URL for {} with no primary key",
Self::resource_name()
))
}
} }
async fn get(db: &PgPool, id: i32) -> Result<Self>; async fn get(db: &PgPool, id: i32) -> Result<Self>;
async fn create(self, db: &PgPool) -> Result<Self>; async fn create(posted: dyn PostedModel, db: &PgPool) -> Result<Self>;
async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64>; async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64>;
async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>>; async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>>;
} }
@ -41,5 +28,5 @@ pub trait UpdateableModel
where where
Self: std::marker::Sized, Self: std::marker::Sized,
{ {
async fn update(self, db: &PgPool, owner_id: i32, id: i32) -> Result<Self>; async fn update(posted: dyn PostedModel, db: &PgPool, owner_id: i32, id: i32) -> Result<Self>;
} }
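
The longer comment in this file's diff records why one version sidelines these traits: the trait methods pin `db` to `&PgPool`, while some call sites need to run the same query inside a transaction. The `impl Executor<'_, Database = Postgres>` signature quoted from @NyxCode is the pattern used throughout the non-trait model code in this diff; a minimal sketch (the query is illustrative only):

    use anyhow::Result;
    use sqlx::{Executor, Postgres};

    // Accepts any Postgres executor; per the quoted comment this covers &PgPool,
    // &mut PgConnection, and &mut Transaction<'_, Postgres>.
    async fn shop_owner_id(
        db: impl Executor<'_, Database = Postgres>,
        shop_id: i32,
    ) -> Result<i32> {
        Ok(sqlx::query_scalar("SELECT owner_id FROM shops WHERE id = $1")
            .bind(shop_id)
            .fetch_one(db)
            .await?)
    }

    // Callers that only have a pool pass `&pool`; callers already inside a transaction
    // pass `&mut tx`. Methods above that run two queries add `+ Copy` to the bound,
    // since every `fetch_one`/`execute` call consumes the executor value.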


@ -1,13 +1,14 @@
use anyhow::{Error, Result}; use anyhow::{Error, Result};
use async_trait::async_trait;
use chrono::prelude::*; use chrono::prelude::*;
use ipnetwork::IpNetwork; use ipnetwork::IpNetwork;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::{Done, Executor, Postgres}; use sqlx::postgres::PgPool;
use tracing::instrument; use tracing::instrument;
use url::Url;
use uuid::Uuid; use uuid::Uuid;
use super::ListParams; use super::ListParams;
use super::{Model, PostedModel, UpdateableModel};
use crate::problem::forbidden_permission; use crate::problem::forbidden_permission;
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
@ -26,82 +27,68 @@ pub struct Owner {
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PostedOwner { pub struct PostedOwner {
pub name: String, pub name: String,
pub mod_version: i32, #[serde(skip_serializing)]
} pub api_key: Option<Uuid>,
#[serde(skip_serializing)]
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct FullPostedOwner {
pub name: String,
pub api_key: Uuid,
pub ip_address: Option<IpNetwork>, pub ip_address: Option<IpNetwork>,
pub mod_version: i32, pub mod_version: i32,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
} }
impl Owner { impl PostedModel for PostedOwner {}
pub fn resource_name() -> &'static str {
#[async_trait]
impl Model for Owner {
fn resource_name() -> &'static str {
"owner" "owner"
} }
pub fn pk(&self) -> i32 { fn pk(&self) -> i32 {
self.id self.id
} }
pub fn url(&self, api_url: &Url) -> Result<Url> {
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
}
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn get(db: impl Executor<'_, Database = Postgres>, id: i32) -> Result<Self> { async fn get(db: &PgPool, id: i32) -> Result<Self> {
sqlx::query_as!(Self, "SELECT * FROM owners WHERE id = $1", id) sqlx::query_as!(Self, "SELECT * FROM owners WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await .await
.map_err(Error::new) .map_err(Error::new)
} }
#[instrument(level = "debug", skip(owner, db))] #[instrument(level = "debug", skip(posted, db))]
pub async fn create( async fn create(posted: PostedOwner, db: &PgPool) -> Result<Self> {
owner: FullPostedOwner,
db: impl Executor<'_, Database = Postgres>,
) -> Result<Self> {
Ok(sqlx::query_as!( Ok(sqlx::query_as!(
Self, Self,
"INSERT INTO owners "INSERT INTO owners
(name, api_key, ip_address, mod_version, created_at, updated_at) (name, api_key, ip_address, mod_version, created_at, updated_at)
VALUES ($1, $2, $3, $4, now(), now()) VALUES ($1, $2, $3, $4, now(), now())
RETURNING *", RETURNING *",
owner.name, posted.name,
owner.api_key, posted.api_key,
owner.ip_address, posted.ip_address,
owner.mod_version, posted.mod_version,
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn delete( async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64> {
db: impl Executor<'_, Database = Postgres> + Copy,
owner_id: i32,
id: i32,
) -> Result<u64> {
let owner = sqlx::query!("SELECT id FROM owners WHERE id = $1", id) let owner = sqlx::query!("SELECT id FROM owners WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if owner.id == owner_id { if owner.id == owner_id {
Ok(sqlx::query!("DELETE FROM owners WHERE id = $1", id) Ok(sqlx::query!("DELETE FROM owners WHERE id = $1", id)
.execute(db) .execute(db)
.await? .await?)
.rows_affected())
} else { } else {
return Err(forbidden_permission()); return Err(forbidden_permission());
} }
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn list( async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>> {
db: impl Executor<'_, Database = Postgres>,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() { let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!( sqlx::query_as!(
Self, Self,
@ -129,18 +116,16 @@ impl Owner {
}; };
Ok(result) Ok(result)
} }
}
#[instrument(level = "debug", skip(owner, db))] #[async_trait]
pub async fn update( impl UpdateableModel for Owner {
owner: PostedOwner, #[instrument(level = "debug", skip(posted, db))]
db: impl Executor<'_, Database = Postgres> + Copy, async fn update(posted: PostedOwner, db: &PgPool, owner_id: i32, id: i32) -> Result<Self> {
owner_id: i32, let owner = sqlx::query!("SELECT id FROM owners WHERE id = $1", id)
id: i32,
) -> Result<Self> {
let existing_owner = sqlx::query!("SELECT id FROM owners WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if existing_owner.id == owner_id { if owner.id == owner_id {
Ok(sqlx::query_as!( Ok(sqlx::query_as!(
Self, Self,
"UPDATE owners SET "UPDATE owners SET
@ -150,8 +135,8 @@ impl Owner {
WHERE id = $1 WHERE id = $1
RETURNING *", RETURNING *",
id, id,
owner.name, posted.name,
owner.mod_version, posted.mod_version,
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)


@ -1,11 +1,12 @@
use anyhow::{Error, Result}; use anyhow::{Error, Result};
use async_trait::async_trait;
use chrono::prelude::*; use chrono::prelude::*;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use sqlx::{Done, Executor, Postgres}; use sqlx::postgres::PgPool;
use tracing::instrument; use tracing::instrument;
use url::Url;
use super::ListParams; use super::ListParams;
use super::{Model, PostedModel, UpdateableModel};
use crate::problem::forbidden_permission; use crate::problem::forbidden_permission;
#[derive(Debug, Serialize, Deserialize, Clone)] #[derive(Debug, Serialize, Deserialize, Clone)]
@ -14,10 +15,11 @@ pub struct Shop {
pub name: String, pub name: String,
pub owner_id: i32, pub owner_id: i32,
pub description: Option<String>, pub description: Option<String>,
pub gold: i32, // removing these until I figure out the plan for buying and selling
pub shop_type: String, // pub is_not_sell_buy: bool,
pub vendor_keywords: Vec<String>, // pub sell_buy_list_id: i32,
pub vendor_keywords_exclude: bool, // pub vendor_id: i32,
// pub vendor_gold: i32,
pub created_at: NaiveDateTime, pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime, pub updated_at: NaiveDateTime,
} }
@ -27,83 +29,60 @@ pub struct PostedShop {
pub name: String, pub name: String,
pub owner_id: Option<i32>, pub owner_id: Option<i32>,
pub description: Option<String>, pub description: Option<String>,
pub gold: Option<i32>,
pub shop_type: Option<String>,
pub vendor_keywords: Option<Vec<String>>,
pub vendor_keywords_exclude: Option<bool>,
} }
impl Shop { impl PostedModel for PostedShop {}
pub fn resource_name() -> &'static str {
#[async_trait]
impl Model for Shop {
fn resource_name() -> &'static str {
"shop" "shop"
} }
pub fn pk(&self) -> i32 { fn pk(&self) -> i32 {
self.id self.id
} }
pub fn url(&self, api_url: &Url) -> Result<Url> {
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
}
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn get(db: impl Executor<'_, Database = Postgres>, id: i32) -> Result<Self> { async fn get(db: &PgPool, id: i32) -> Result<Self> {
sqlx::query_as!(Self, "SELECT * FROM shops WHERE id = $1", id) sqlx::query_as!(Self, "SELECT * FROM shops WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await .await
.map_err(Error::new) .map_err(Error::new)
} }
#[instrument(level = "debug", skip(shop, db))] #[instrument(level = "debug", skip(posted, db))]
pub async fn create( async fn create(posted: PostedShop, db: &PgPool) -> Result<Self> {
shop: PostedShop,
db: impl Executor<'_, Database = Postgres>,
) -> Result<Self> {
Ok(sqlx::query_as!( Ok(sqlx::query_as!(
Self, Self,
"INSERT INTO shops "INSERT INTO shops
(name, owner_id, description, gold, shop_type, vendor_keywords, (name, owner_id, description, created_at, updated_at)
vendor_keywords_exclude, created_at, updated_at) VALUES ($1, $2, $3, now(), now())
VALUES ($1, $2, $3, $4, $5, $6, $7, now(), now())
RETURNING *", RETURNING *",
shop.name, posted.name,
shop.owner_id, posted.owner_id,
shop.description, posted.description,
shop.gold.unwrap_or(0),
shop.shop_type.unwrap_or("general_store".to_string()),
&shop
.vendor_keywords
.unwrap_or_else(|| vec!["VendorItemKey".to_string(), "VendorNoSale".to_string()]),
shop.vendor_keywords_exclude.unwrap_or(true),
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn delete( async fn delete(db: &PgPool, owner_id: i32, id: i32) -> Result<u64> {
db: impl Executor<'_, Database = Postgres> + Copy,
owner_id: i32,
id: i32,
) -> Result<u64> {
let shop = sqlx::query!("SELECT owner_id FROM shops WHERE id = $1", id) let shop = sqlx::query!("SELECT owner_id FROM shops WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if shop.owner_id == owner_id { if shop.owner_id == owner_id {
return Ok(sqlx::query!("DELETE FROM shops WHERE shops.id = $1", id) return Ok(sqlx::query!("DELETE FROM shops WHERE shops.id = $1", id)
.execute(db) .execute(db)
.await? .await?);
.rows_affected());
} else { } else {
return Err(forbidden_permission()); return Err(forbidden_permission());
} }
} }
#[instrument(level = "debug", skip(db))] #[instrument(level = "debug", skip(db))]
pub async fn list( async fn list(db: &PgPool, list_params: &ListParams) -> Result<Vec<Self>> {
db: impl Executor<'_, Database = Postgres>,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() { let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!( sqlx::query_as!(
Self, Self,
@ -131,39 +110,29 @@ impl Shop {
}; };
Ok(result) Ok(result)
} }
}
#[instrument(level = "debug", skip(shop, db))] #[async_trait]
pub async fn update( impl UpdateableModel for Shop {
shop: PostedShop, #[instrument(level = "debug", skip(posted, db))]
db: impl Executor<'_, Database = Postgres> + Copy, async fn update(posted: PostedShop, db: &PgPool, owner_id: i32, id: i32) -> Result<Self> {
owner_id: i32, let shop = sqlx::query!("SELECT owner_id FROM shops WHERE id = $1", id)
id: i32,
) -> Result<Self> {
let existing_shop = sqlx::query!("SELECT owner_id FROM shops WHERE id = $1", id)
.fetch_one(db) .fetch_one(db)
.await?; .await?;
if existing_shop.owner_id == owner_id { if shop.owner_id == owner_id {
Ok(sqlx::query_as!( Ok(sqlx::query_as!(
Self, Self,
"UPDATE shops SET "UPDATE shops SET
name = $2, name = $2,
owner_id = $3, owner_id = $3,
description = $4, description = $4,
gold = $5,
shop_type = $6,
vendor_keywords = $7,
vendor_keywords_exclude = $8,
updated_at = now() updated_at = now()
WHERE id = $1 WHERE id = $1
RETURNING *", RETURNING *",
id, id,
shop.name, posted.name,
shop.owner_id, posted.owner_id,
shop.description, posted.description,
shop.gold,
shop.shop_type,
&shop.vendor_keywords.unwrap_or_else(|| vec![]),
shop.vendor_keywords_exclude,
) )
.fetch_one(db) .fetch_one(db)
.await?) .await?)
@ -171,48 +140,4 @@ impl Shop {
return Err(forbidden_permission()); return Err(forbidden_permission());
} }
} }
#[instrument(level = "debug", skip(db))]
pub async fn accepts_keywords(
db: impl Executor<'_, Database = Postgres>,
id: i32,
keywords: &[String],
) -> Result<bool> {
// Macro not available, see: https://github.com/launchbadge/sqlx/issues/428
Ok(sqlx::query_scalar(
"SELECT EXISTS (
SELECT 1 FROM shops
WHERE id = $1
AND ((
vendor_keywords_exclude = true AND
NOT vendor_keywords && $2
) OR (
vendor_keywords_exclude = false AND
vendor_keywords && $2
))
)",
)
.bind(id)
.bind(keywords)
.fetch_one(db)
.await?)
}
#[instrument(level = "debug", skip(db))]
pub async fn update_gold(
db: impl Executor<'_, Database = Postgres>,
id: i32,
gold_delta: i32,
) -> Result<()> {
sqlx::query!(
"UPDATE shops SET
gold = gold + $2
WHERE id = $1",
id,
gold_delta,
)
.execute(db)
.await?;
Ok(())
}
} }
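
`accepts_keywords` above goes through the unchecked `sqlx::query_scalar` builder because, as its comment notes, the corresponding macro is not available for this query (see the linked issue). The `vendor_keywords && $2` test is Postgres array overlap, so with `vendor_keywords_exclude = true` a shop rejects any item that shares a listed keyword. A usage sketch (the keyword value is an example only):

    use anyhow::Result;
    use sqlx::postgres::PgPool;

    use crate::models::Shop; // re-exported in models/mod.rs

    async fn can_stock_item(pool: &PgPool, shop_id: i32) -> Result<bool> {
        let item_keywords = vec!["VendorItemMisc".to_string()];
        // true when the shop's keyword filter allows an item with these keywords
        Shop::accepts_keywords(pool, shop_id, &item_keywords).await
    }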


@ -1,185 +0,0 @@
use anyhow::{Error, Result};
use chrono::prelude::*;
use serde::{Deserialize, Serialize};
use sqlx::{Done, Executor, Postgres};
use tracing::instrument;
use url::Url;
use super::ListParams;
use crate::problem::forbidden_permission;
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Transaction {
pub id: i32,
pub shop_id: i32,
pub owner_id: i32,
pub mod_name: String,
pub local_form_id: i32,
pub name: String,
pub form_type: i32,
pub is_food: bool,
pub price: i32,
pub is_sell: bool,
pub quantity: i32,
pub amount: i32,
pub keywords: Vec<String>,
pub created_at: NaiveDateTime,
pub updated_at: NaiveDateTime,
}
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct PostedTransaction {
pub shop_id: i32,
pub owner_id: Option<i32>,
pub mod_name: String,
pub local_form_id: i32,
pub name: String,
pub form_type: i32,
pub is_food: bool,
pub price: i32,
pub is_sell: bool,
pub quantity: i32,
pub amount: i32,
pub keywords: Vec<String>,
}
impl Transaction {
pub fn resource_name() -> &'static str {
"transaction"
}
pub fn pk(&self) -> i32 {
self.id
}
pub fn url(&self, api_url: &Url) -> Result<Url> {
Ok(api_url.join(&format!("{}s/{}", Self::resource_name(), self.pk()))?)
}
#[instrument(level = "debug", skip(db))]
pub async fn get(db: impl Executor<'_, Database = Postgres>, id: i32) -> Result<Self> {
sqlx::query_as!(Self, "SELECT * FROM transactions WHERE id = $1", id)
.fetch_one(db)
.await
.map_err(Error::new)
}
#[instrument(level = "debug", skip(db))]
pub async fn create(
transaction: PostedTransaction,
db: impl Executor<'_, Database = Postgres>,
) -> Result<Self> {
Ok(sqlx::query_as!(
Self,
"INSERT INTO transactions
(shop_id, owner_id, mod_name, local_form_id, name, form_type, is_food, price,
is_sell, quantity, amount, keywords, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, now(), now())
RETURNING *",
transaction.shop_id,
transaction.owner_id,
transaction.mod_name,
transaction.local_form_id,
transaction.name,
transaction.form_type,
transaction.is_food,
transaction.price,
transaction.is_sell,
transaction.quantity,
transaction.amount,
&transaction.keywords,
)
.fetch_one(db)
.await?)
}
#[instrument(level = "debug", skip(db))]
pub async fn delete(
db: impl Executor<'_, Database = Postgres> + Copy,
owner_id: i32,
id: i32,
) -> Result<u64> {
let transaction = sqlx::query!("SELECT owner_id FROM transactions WHERE id = $1", id)
.fetch_one(db)
.await?;
if transaction.owner_id == owner_id {
return Ok(sqlx::query!("DELETE FROM transactions WHERE id = $1", id)
.execute(db)
.await?
.rows_affected());
} else {
return Err(forbidden_permission());
}
}
#[instrument(level = "debug", skip(db))]
pub async fn list(
db: impl Executor<'_, Database = Postgres>,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!(
Self,
"SELECT * FROM transactions
ORDER BY $1
LIMIT $2
OFFSET $3",
order_by,
list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0),
)
.fetch_all(db)
.await?
} else {
sqlx::query_as!(
Self,
"SELECT * FROM transactions
LIMIT $1
OFFSET $2",
list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0),
)
.fetch_all(db)
.await?
};
Ok(result)
}
#[instrument(level = "debug", skip(db))]
pub async fn list_by_shop_id(
db: impl Executor<'_, Database = Postgres>,
shop_id: i32,
list_params: &ListParams,
) -> Result<Vec<Self>> {
let result = if let Some(order_by) = list_params.get_order_by() {
sqlx::query_as!(
Self,
"SELECT * FROM transactions
WHERE shop_id = $1
ORDER BY $2
LIMIT $3
OFFSET $4",
shop_id,
order_by,
list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0),
)
.fetch_all(db)
.await?
} else {
sqlx::query_as!(
Self,
"SELECT * FROM transactions
WHERE shop_id = $1
LIMIT $2
OFFSET $3",
shop_id,
list_params.limit.unwrap_or(10),
list_params.offset.unwrap_or(0),
)
.fetch_all(db)
.await?
};
Ok(result)
}
}


@ -1,5 +1,3 @@
use std::borrow::Borrow;
use anyhow::{anyhow, Error}; use anyhow::{anyhow, Error};
use http::StatusCode; use http::StatusCode;
use http_api_problem::HttpApiProblem; use http_api_problem::HttpApiProblem;
@ -33,7 +31,6 @@ pub fn from_anyhow(error: anyhow::Error) -> HttpApiProblem {
Err(error) => error, Err(error) => error,
}; };
// TODO: should probably decentralize all this error handling to the places where they are relevant
if let Some(sqlx_error) = error.downcast_ref::<sqlx::error::Error>() { if let Some(sqlx_error) = error.downcast_ref::<sqlx::error::Error>() {
match sqlx_error { match sqlx_error {
sqlx::error::Error::RowNotFound => { sqlx::error::Error::RowNotFound => {
@ -51,28 +48,12 @@ pub fn from_anyhow(error: anyhow::Error) -> HttpApiProblem {
dbg!(&code); dbg!(&code);
if let Some(constraint) = pg_error.constraint() { if let Some(constraint) = pg_error.constraint() {
dbg!(&constraint); dbg!(&constraint);
if code == "23503" if code == "23503" && constraint == "shops_owner_id_fkey" {
&& (constraint == "shops_owner_id_fkey"
|| constraint == "interior_ref_lists_owner_id_fkey"
|| constraint == "merchandise_lists_owner_id_fkey"
|| constraint == "transactions_owner_id_fkey")
{
// foreign_key_violation // foreign_key_violation
return HttpApiProblem::with_title_and_type_from_status( return HttpApiProblem::with_title_and_type_from_status(
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
) )
// TODO: better message when this is triggered by a non-cascading DELETE
.set_detail("Owner does not exist"); .set_detail("Owner does not exist");
} else if code == "23503"
&& (constraint == "interior_ref_lists_shop_id_fkey"
|| constraint == "merchandise_lists_shop_id_fkey"
|| constraint == "transactions_shop_id_fkey")
{
// foreign_key_violation
return HttpApiProblem::with_title_and_type_from_status(
StatusCode::BAD_REQUEST,
)
.set_detail("Shop does not exist");
} else if code == "23505" && constraint == "owners_api_key_key" { } else if code == "23505" && constraint == "owners_api_key_key" {
// unique_violation // unique_violation
return HttpApiProblem::with_title_and_type_from_status( return HttpApiProblem::with_title_and_type_from_status(
@ -91,82 +72,20 @@ pub fn from_anyhow(error: anyhow::Error) -> HttpApiProblem {
StatusCode::BAD_REQUEST, StatusCode::BAD_REQUEST,
) )
.set_detail("Owner already has a shop with that name"); .set_detail("Owner already has a shop with that name");
} else if code == "23505" && constraint == "interior_ref_lists_shop_id_key" {
// unique_violation
return HttpApiProblem::with_title_and_type_from_status(
StatusCode::BAD_REQUEST,
)
.set_detail("Interior ref list already exists for that shop");
} else if code == "23505" && constraint == "merchandise_lists_shop_id_key" {
// unique_violation
return HttpApiProblem::with_title_and_type_from_status(
StatusCode::BAD_REQUEST,
)
.set_detail("Merchandise list already exists for that shop");
} else if code == "23514" && constraint == "merchandise_quantity_gt_zero" {
return HttpApiProblem::with_title_and_type_from_status(
StatusCode::BAD_REQUEST,
)
.set_detail("Quantity of merchandise must be greater than zero");
} }
} }
// Might possibly leak sensitive info:
// let mut problem = HttpApiProblem::with_title_and_type_from_status(
// StatusCode::INTERNAL_SERVER_ERROR,
// )
// .set_title("Database Error")
// .set_detail(format!(
// "{}. {}",
// pg_error.message(),
// pg_error.detail().unwrap_or("")
// ));
// problem
// .set_value("code".to_string(), &code.to_string())
// .unwrap();
// return problem;
} }
_ => {} _ => {}
} }
} }
if let Some(json_error) = error.downcast_ref::<serde_json::Error>() {
return HttpApiProblem::with_title_and_type_from_status(StatusCode::BAD_REQUEST)
.set_title("Json Body Deserialization Error")
.set_detail(format!("{}", json_error));
}
if let Some(bincode_error) = error.downcast_ref::<bincode::Error>() {
return match bincode_error.borrow() {
bincode::ErrorKind::Io(io_error) => {
HttpApiProblem::with_title_and_type_from_status(StatusCode::BAD_REQUEST)
.set_title("Bincode Body Deserialization Error")
.set_detail(format!("io error ({:?}): {}", io_error.kind(), io_error))
}
error => HttpApiProblem::with_title_and_type_from_status(StatusCode::BAD_REQUEST)
.set_title("Bincode Body Deserialization Error")
.set_detail(format!("{}", error)),
};
}
error!("Recovering unhandled error: {:?}", error); error!("Recovering unhandled error: {:?}", error);
HttpApiProblem::with_title_and_type_from_status(StatusCode::INTERNAL_SERVER_ERROR) // TODO: this leaks internal info, should not stringify error
HttpApiProblem::new(format!("Internal Server Error: {:?}", error))
.set_status(StatusCode::INTERNAL_SERVER_ERROR)
} }
pub async fn unpack_problem(rejection: Rejection) -> Result<impl Reply, Rejection> { pub async fn unpack_problem(rejection: Rejection) -> Result<impl Reply, Rejection> {
if rejection.is_not_found() {
let reply = warp::reply::json(&HttpApiProblem::with_title_and_type_from_status(
StatusCode::NOT_FOUND,
));
let reply = warp::reply::with_status(reply, StatusCode::NOT_FOUND);
let reply = warp::reply::with_header(
reply,
warp::http::header::CONTENT_TYPE,
http_api_problem::PROBLEM_JSON_MEDIA_TYPE,
);
return Ok(reply);
}
if let Some(problem) = rejection.find::<HttpApiProblem>() { if let Some(problem) = rejection.find::<HttpApiProblem>() {
let code = problem.status.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR); let code = problem.status.unwrap_or(StatusCode::INTERNAL_SERVER_ERROR);

File diff suppressed because it is too large


@ -9,8 +9,7 @@
"quantity": 1, "quantity": 1,
"form_type": 32, "form_type": 32,
"is_food": false, "is_food": false,
"price": 1, "price": 1
"keywords": ["VendorItemMisc"]
}, },
{ {
"mod_name": "Skyrim.esm", "mod_name": "Skyrim.esm",
@ -19,8 +18,7 @@
"quantity": 2, "quantity": 2,
"form_type": 23, "form_type": 23,
"is_food": false, "is_food": false,
"price": 2, "price": 2
"keywords": ["VendorItemScroll"]
}, },
{ {
"mod_name": "Skyrim.esm", "mod_name": "Skyrim.esm",
@ -29,8 +27,7 @@
"quantity": 3, "quantity": 3,
"form_type": 46, "form_type": 46,
"is_food": true, "is_food": true,
"price": 3, "price": 3
"keywords": ["VendorItemIngredient"]
}, },
{ {
"mod_name": "Skyrim.esm", "mod_name": "Skyrim.esm",
@ -39,8 +36,7 @@
"quantity": 4, "quantity": 4,
"form_type": 41, "form_type": 41,
"is_food": false, "is_food": false,
"price": 4, "price": 4
"keywords": ["VendorItemWeapon"]
} }
] ]
} }


@ -1,13 +0,0 @@
{
"shop_id": 1,
"mod_name": "Skyrim.esm",
"local_form_id": 5,
"name": "New Thing",
"form_type": 41,
"is_food": false,
"price": 100,
"is_sell": false,
"quantity": 1,
"amount": 100,
"keywords": ["VendorItemMisc"]
}