Simplify db migrations
Use `refinery_cli` against a folder of `.sql` migrations. I got tired of commenting out my code when I just wanted to rerun the initial migration. Plain SQL is a lot more flexible than the `barrel` syntax.
This commit is contained in:
@@ -1,76 +0,0 @@
|
||||
use barrel::{backend::Pg, types, Migration};
|
||||
|
||||
pub fn migration() -> String {
|
||||
let mut m = Migration::new();
|
||||
|
||||
m.create_table("owners", |t| {
|
||||
t.add_column("id", types::primary().indexed(true));
|
||||
t.add_column("name", types::varchar(255));
|
||||
t.add_column("api_key", types::uuid().indexed(true));
|
||||
t.add_column("ip_address", types::custom("inet").nullable(true));
|
||||
t.add_column("mod_version", types::integer());
|
||||
t.add_column("created_at", types::custom("timestamp(3)"));
|
||||
t.add_column("updated_at", types::custom("timestamp(3)"));
|
||||
t.add_index(
|
||||
"owners_unique_name_and_api_key",
|
||||
types::index(vec!["name", "api_key"]).unique(true),
|
||||
);
|
||||
});
|
||||
|
||||
m.create_table("shops", |t| {
|
||||
t.add_column("id", types::primary().indexed(true));
|
||||
t.add_column("name", types::varchar(255));
|
||||
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
|
||||
t.add_column("description", types::text().nullable(true));
|
||||
// removing these until I figure out the plan for buying and selling
|
||||
// t.add_column("is_not_sell_buy", types::boolean().default(true));
|
||||
// t.add_column("sell_buy_list_id", types::integer().default(0));
|
||||
// t.add_column("vendor_id", types::integer());
|
||||
// t.add_column("vendor_gold", types::integer());
|
||||
t.add_column("created_at", types::custom("timestamp(3)"));
|
||||
t.add_column("updated_at", types::custom("timestamp(3)"));
|
||||
t.add_index(
|
||||
"shops_unique_name_and_owner_id",
|
||||
types::index(vec!["name", "owner_id"]).unique(true),
|
||||
);
|
||||
});
|
||||
|
||||
m.create_table("merchandise_lists", |t| {
|
||||
t.add_column("id", types::primary().indexed(true));
|
||||
t.add_column(
|
||||
"shop_id",
|
||||
types::foreign("shops", "id").indexed(true).unique(true),
|
||||
);
|
||||
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
|
||||
t.add_column("form_list", types::custom("jsonb"));
|
||||
t.add_column("created_at", types::custom("timestamp(3)"));
|
||||
t.add_column("updated_at", types::custom("timestamp(3)"));
|
||||
});
|
||||
|
||||
// m.create_table("transactions", |t| {
|
||||
// t.add_column("id", types::primary().indexed(true));
|
||||
// t.add_column("shop_id", types::foreign("shops", "id").indexed(true));
|
||||
// t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
|
||||
// t.add_column("merchandise_list_id", types::foreign("merchandise_lists", "id"));
|
||||
// t.add_column("customer_name", types::varchar(255));
|
||||
// t.add_column("is_customer_npc", types::boolean());
|
||||
// t.add_column("is_customer_buying", types::boolean());
|
||||
// t.add_column("quantity", types::integer());
|
||||
// t.add_column("is_void", types::boolean());
|
||||
// t.add_column("created_at", types::custom("timestamp(3)"));
|
||||
// });
|
||||
|
||||
m.create_table("interior_ref_lists", |t| {
|
||||
t.add_column("id", types::primary().indexed(true));
|
||||
t.add_column(
|
||||
"shop_id",
|
||||
types::foreign("shops", "id").indexed(true).unique(true),
|
||||
);
|
||||
t.add_column("owner_id", types::foreign("owners", "id").indexed(true));
|
||||
t.add_column("ref_list", types::custom("jsonb"));
|
||||
t.add_column("created_at", types::custom("timestamp(3)"));
|
||||
t.add_column("updated_at", types::custom("timestamp(3)"));
|
||||
});
|
||||
|
||||
m.make::<Pg>()
|
||||
}
|
||||
35
src/db/migrations/V1__initial.sql
Normal file
35
src/db/migrations/V1__initial.sql
Normal file
@@ -0,0 +1,35 @@
|
||||
-- Initial schema.
-- Every statement is idempotent (IF NOT EXISTS) so a partially-applied run
-- can be retried safely; previously only "owners" had this guard, which made
-- reruns fail on the later tables and indexes.

-- Mod users; (name, api_key) must be unique as a pair, and api_key alone is
-- unique so it can be used as a credential lookup.
CREATE TABLE IF NOT EXISTS "owners" (
    "id" SERIAL PRIMARY KEY NOT NULL,
    "name" VARCHAR(255) NOT NULL,
    "api_key" UUID NOT NULL UNIQUE,
    "ip_address" inet,
    "mod_version" INTEGER NOT NULL,
    "created_at" timestamp(3) NOT NULL,
    "updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS "owners_unique_name_and_api_key" ON "owners" ("name", "api_key");

-- Shops belong to an owner; a shop name is unique per owner.
CREATE TABLE IF NOT EXISTS "shops" (
    "id" SERIAL PRIMARY KEY NOT NULL,
    "name" VARCHAR(255) NOT NULL,
    "owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
    "description" TEXT,
    "created_at" timestamp(3) NOT NULL,
    "updated_at" timestamp(3) NOT NULL
);
CREATE UNIQUE INDEX IF NOT EXISTS "shops_unique_name_and_owner_id" ON "shops" ("name", "owner_id");

-- One interior reference list per shop (shop_id UNIQUE), payload as JSONB.
CREATE TABLE IF NOT EXISTS "interior_ref_lists" (
    "id" SERIAL PRIMARY KEY NOT NULL,
    "shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
    "owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
    "ref_list" jsonb NOT NULL,
    "created_at" timestamp(3) NOT NULL,
    "updated_at" timestamp(3) NOT NULL
);

-- One merchandise list per shop (shop_id UNIQUE), payload as JSONB.
CREATE TABLE IF NOT EXISTS "merchandise_lists" (
    "id" SERIAL PRIMARY KEY NOT NULL,
    "shop_id" INTEGER REFERENCES "shops"(id) NOT NULL UNIQUE,
    "owner_id" INTEGER REFERENCES "owners"(id) NOT NULL,
    "form_list" jsonb NOT NULL,
    "created_at" timestamp(3) NOT NULL,
    "updated_at" timestamp(3) NOT NULL
);
|
||||
@@ -1,3 +0,0 @@
|
||||
// Embeds every migration module found under `src/db/migrations` into this
// module at compile time, so `migrations::runner()` can execute them.
use refinery::include_migration_mods;

include_migration_mods!("src/db/migrations");
|
||||
@@ -1,16 +0,0 @@
|
||||
use refinery::config::Config;
|
||||
|
||||
mod migrations;
|
||||
|
||||
pub async fn migrate() {
|
||||
let mut config = Config::from_file_location("src/db/refinery.toml").unwrap();
|
||||
|
||||
match migrations::runner().run_async(&mut config).await {
|
||||
Ok(report) => {
|
||||
dbg!(report.applied_migrations());
|
||||
}
|
||||
Err(error) => {
|
||||
dbg!(error);
|
||||
}
|
||||
};
|
||||
}
|
||||
5
src/db/reset_db.pgsql
Normal file
5
src/db/reset_db.pgsql
Normal file
@@ -0,0 +1,5 @@
|
||||
-- Development helper: tears down every table created by the migrations, plus
-- refinery's bookkeeping table, so the migrations can be rerun from scratch.
-- IF EXISTS makes the script safe to run against a partially-created schema
-- (the previous version aborted on the first missing table).
DROP TABLE IF EXISTS owners CASCADE;
DROP TABLE IF EXISTS shops CASCADE;
DROP TABLE IF EXISTS interior_ref_lists CASCADE;
DROP TABLE IF EXISTS merchandise_lists CASCADE;
DROP TABLE IF EXISTS refinery_schema_history CASCADE;
|
||||
18
src/main.rs
18
src/main.rs
@@ -1,5 +1,4 @@
|
||||
use anyhow::Result;
|
||||
use clap::Clap;
|
||||
use dotenv::dotenv;
|
||||
use http::StatusCode;
|
||||
use hyper::server::Server;
|
||||
@@ -14,7 +13,6 @@ use url::Url;
|
||||
use warp::Filter;
|
||||
|
||||
mod caches;
|
||||
mod db;
|
||||
mod handlers;
|
||||
#[macro_use]
|
||||
mod macros;
|
||||
@@ -28,13 +26,6 @@ use models::owner::Owner;
|
||||
use models::shop::Shop;
|
||||
use models::ListParams;
|
||||
|
||||
// Command-line options parsed by clap.
#[derive(Clap)]
#[clap(version = "0.1.0", author = "Tyler Hallada <tyler@hallada.net>")]
struct Opts {
    // `-m` / `--migrate`: run pending database migrations and exit instead of
    // starting the server (checked at the top of `main`).
    #[clap(short, long)]
    migrate: bool,
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Environment {
|
||||
pub db: PgPool,
|
||||
@@ -69,7 +60,7 @@ fn json_body<T>() -> impl Filter<Extract = (T,), Error = warp::Rejection> + Clon
|
||||
where
|
||||
T: Send + DeserializeOwned,
|
||||
{
|
||||
warp::body::content_length_limit(1024 * 64).and(warp::body::json())
|
||||
warp::body::content_length_limit(1024 * 1024).and(warp::body::json())
|
||||
}
|
||||
|
||||
#[tokio::main]
|
||||
@@ -81,13 +72,6 @@ async fn main() -> Result<()> {
|
||||
.with_env_filter(env_log_filter)
|
||||
.with_span_events(FmtSpan::CLOSE)
|
||||
.init();
|
||||
let opts: Opts = Opts::parse();
|
||||
|
||||
if opts.migrate {
|
||||
info!("going to migrate now!");
|
||||
db::migrate().await;
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let host = env::var("HOST").expect("`HOST` environment variable not defined");
|
||||
let host_url = Url::parse(&host).expect("Cannot parse URL from `HOST` environment variable");
|
||||
|
||||
Reference in New Issue
Block a user