Add world tables and columns, temporary backfill

I was too lazy to make a new script, so main() contains a temporary backfill that uses plugins.zip. Once I run it, I will remove it.
This commit is contained in:
2021-07-21 21:35:11 -04:00
parent 0e0fdfd59d
commit 4b333b3b99
9 changed files with 449 additions and 82 deletions

View File

@@ -1,25 +1,39 @@
use anyhow::{Context, Result};
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use tracing::instrument;
#[derive(Debug, Serialize, Deserialize)]
use super::BATCH_SIZE;
/// A cell row as persisted in the `cells` table.
///
/// `FromRow` is derived so this can be returned by the unchecked
/// `sqlx::query_as` function used in `batched_insert`.
#[derive(Debug, Serialize, Deserialize, FromRow)]
pub struct Cell {
// Database-generated primary key.
pub id: i32,
pub form_id: i32,
// Master file this record belongs to; part of the upsert conflict target.
pub master: String,
// Grid coordinates; nullable (interior cells presumably have none — TODO confirm).
pub x: Option<i32>,
pub y: Option<i32>,
// TODO: make this not nullable
pub world_id: Option<i32>,
pub is_persistent: bool,
// Timestamps maintained by the insert queries via now().
pub updated_at: NaiveDateTime,
pub created_at: NaiveDateTime,
}
/// A cell that has not yet been written to the database.
///
/// Mirrors [`Cell`] minus the database-generated columns
/// (`id`, `created_at`, `updated_at`); consumed by `batched_insert`.
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsavedCell {
pub form_id: i32,
pub master: String,
pub x: Option<i32>,
pub y: Option<i32>,
pub world_id: Option<i32>,
pub is_persistent: bool,
}
#[instrument(level = "debug", skip(pool))]
pub async fn insert(
pool: &sqlx::Pool<sqlx::Postgres>,
form_id: i32,
master: &str,
x: Option<i32>,
y: Option<i32>,
world_id: Option<i32>,
@@ -28,13 +42,14 @@ pub async fn insert(
sqlx::query_as!(
Cell,
"INSERT INTO cells
(form_id, x, y, world_id, is_persistent, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, now(), now())
ON CONFLICT (form_id, world_id) DO UPDATE
(form_id, master, x, y, world_id, is_persistent, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, now(), now())
ON CONFLICT (form_id, master, world_id) DO UPDATE
SET (x, y, is_persistent, updated_at) =
(EXCLUDED.x, EXCLUDED.y, EXCLUDED.is_persistent, now())
RETURNING *",
form_id,
master,
x,
y,
world_id,
@@ -44,3 +59,48 @@ pub async fn insert(
.await
.context("Failed to insert cell")
}
#[instrument(level = "debug", skip(pool))]
/// Upserts `cells` in chunks of [`BATCH_SIZE`], returning the saved rows.
///
/// Each chunk is decomposed into one column vector per bind parameter,
/// because the Postgres `UNNEST(...)` form takes parallel arrays.
///
/// # Errors
///
/// Returns an error if any batch insert fails; earlier batches are NOT
/// rolled back (each `fetch_all` is its own statement).
pub async fn batched_insert(
    pool: &sqlx::Pool<sqlx::Postgres>,
    cells: &[UnsavedCell],
) -> Result<Vec<Cell>> {
    let mut saved_cells = Vec::with_capacity(cells.len());
    for batch in cells.chunks(BATCH_SIZE) {
        // Preallocate the per-column bind arrays to the batch size.
        let mut form_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut masters: Vec<String> = Vec::with_capacity(batch.len());
        let mut xs: Vec<Option<i32>> = Vec::with_capacity(batch.len());
        let mut ys: Vec<Option<i32>> = Vec::with_capacity(batch.len());
        let mut world_ids: Vec<Option<i32>> = Vec::with_capacity(batch.len());
        let mut is_persistents: Vec<bool> = Vec::with_capacity(batch.len());
        for unsaved_cell in batch {
            form_ids.push(unsaved_cell.form_id);
            masters.push(unsaved_cell.master.clone());
            xs.push(unsaved_cell.x);
            ys.push(unsaved_cell.y);
            world_ids.push(unsaved_cell.world_id);
            is_persistents.push(unsaved_cell.is_persistent);
        }
        saved_cells.extend(
            // sqlx doesn't understand arrays of Options with the query_as! macro,
            // so use the unchecked query_as function with explicit binds.
            sqlx::query_as(
                r#"INSERT INTO cells (form_id, master, x, y, world_id, is_persistent, created_at, updated_at)
                SELECT *, now(), now() FROM UNNEST($1::int[], $2::text[], $3::int[], $4::int[], $5::int[], $6::bool[])
                ON CONFLICT (form_id, master, world_id) DO UPDATE
                SET (x, y, is_persistent, updated_at) =
                (EXCLUDED.x, EXCLUDED.y, EXCLUDED.is_persistent, now())
                RETURNING *"#,
            )
            .bind(&form_ids)
            .bind(&masters)
            .bind(&xs)
            .bind(&ys)
            .bind(&world_ids)
            .bind(&is_persistents)
            .fetch_all(pool)
            .await
            .context("Failed to insert cells")?,
        );
    }
    Ok(saved_cells)
}

View File

@@ -20,6 +20,21 @@ pub struct File {
pub created_at: NaiveDateTime,
}
#[instrument(level = "debug", skip(pool))]
/// Looks up a single file row by its Nexus file id.
///
/// Returns `Ok(None)` when no matching row exists.
pub async fn get_by_nexus_file_id(
    pool: &sqlx::Pool<sqlx::Postgres>,
    nexus_file_id: i32,
) -> Result<Option<File>> {
    let query = sqlx::query_as!(
        File,
        "SELECT * FROM files WHERE nexus_file_id = $1",
        nexus_file_id
    );
    query
        .fetch_optional(pool)
        .await
        .context("Failed to get file")
}
#[instrument(level = "debug", skip(pool))]
pub async fn insert(
pool: &sqlx::Pool<sqlx::Postgres>,

View File

@@ -6,3 +6,5 @@ pub mod plugin;
pub mod plugin_cell;
pub mod plugin_world;
pub mod world;
/// Maximum number of records per chunk in the `batched_insert` helpers.
pub const BATCH_SIZE: usize = 50;

View File

@@ -9,11 +9,13 @@ pub struct Plugin {
pub name: String,
pub hash: i64,
pub file_id: i32,
pub version: Option<f64>,
pub version: f64,
pub size: i64,
pub author: Option<String>,
pub description: Option<String>,
pub masters: Option<Vec<String>>,
pub masters: Vec<String>,
pub file_name: String,
pub file_path: String,
pub updated_at: NaiveDateTime,
pub created_at: NaiveDateTime,
}
@@ -24,20 +26,22 @@ pub async fn insert(
name: &str,
hash: i64,
file_id: i32,
version: Option<f64>,
version: f64,
size: i64,
author: Option<&str>,
description: Option<&str>,
masters: Option<&[String]>,
masters: &[String],
file_name: &str,
file_path: &str,
) -> Result<Plugin> {
sqlx::query_as!(
Plugin,
"INSERT INTO plugins
(name, hash, file_id, version, size, author, description, masters, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, now(), now())
ON CONFLICT (file_id, name) DO UPDATE
SET (hash, version, author, description, masters, updated_at) =
(EXCLUDED.hash, EXCLUDED.version, EXCLUDED.author, EXCLUDED.description, EXCLUDED.masters, now())
(name, hash, file_id, version, size, author, description, masters, file_name, file_path, created_at, updated_at)
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, now(), now())
ON CONFLICT (file_id, file_path) DO UPDATE
SET (name, hash, version, author, description, masters, file_name, updated_at) =
(EXCLUDED.name, EXCLUDED.hash, EXCLUDED.version, EXCLUDED.author, EXCLUDED.description, EXCLUDED.masters, EXCLUDED.file_name, now())
RETURNING *",
name,
hash,
@@ -46,7 +50,9 @@ pub async fn insert(
size,
author,
description,
masters
masters,
file_name,
file_path
)
.fetch_one(pool)
.await

View File

@@ -1,9 +1,12 @@
use anyhow::{Context, Result};
use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use sqlx::FromRow;
use tracing::instrument;
#[derive(Debug, Serialize, Deserialize)]
use super::BATCH_SIZE;
#[derive(Debug, Serialize, Deserialize, FromRow)]
pub struct PluginCell {
pub id: i32,
pub plugin_id: i32,
@@ -13,6 +16,13 @@ pub struct PluginCell {
pub created_at: NaiveDateTime,
}
/// A plugin↔cell association that has not yet been written to the database.
///
/// Mirrors `PluginCell` minus the database-generated columns; consumed by
/// `batched_insert`.
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsavedPluginCell {
pub plugin_id: i32,
pub cell_id: i32,
pub editor_id: Option<String>,
}
#[instrument(level = "debug", skip(pool))]
pub async fn insert(
pool: &sqlx::Pool<sqlx::Postgres>,
@@ -36,3 +46,38 @@ pub async fn insert(
.await
.context("Failed to insert plugin_cell")
}
#[instrument(level = "debug", skip(pool))]
/// Upserts `plugin_cells` in chunks of [`BATCH_SIZE`], returning the saved rows.
///
/// Each chunk is decomposed into one column vector per bind parameter,
/// because the Postgres `UNNEST(...)` form takes parallel arrays.
///
/// # Errors
///
/// Returns an error if any batch insert fails; earlier batches are NOT
/// rolled back (each `fetch_all` is its own statement).
pub async fn batched_insert(
    pool: &sqlx::Pool<sqlx::Postgres>,
    plugin_cells: &[UnsavedPluginCell],
) -> Result<Vec<PluginCell>> {
    let mut saved_plugin_cells = Vec::with_capacity(plugin_cells.len());
    for batch in plugin_cells.chunks(BATCH_SIZE) {
        // Preallocate the per-column bind arrays to the batch size.
        let mut plugin_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut cell_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut editor_ids: Vec<Option<String>> = Vec::with_capacity(batch.len());
        for unsaved_plugin_cell in batch {
            plugin_ids.push(unsaved_plugin_cell.plugin_id);
            cell_ids.push(unsaved_plugin_cell.cell_id);
            // Option<String>::clone is equivalent to .as_ref().map(clone).
            editor_ids.push(unsaved_plugin_cell.editor_id.clone());
        }
        saved_plugin_cells.extend(
            // sqlx doesn't understand arrays of Options with the query_as! macro,
            // so use the unchecked query_as function with explicit binds.
            sqlx::query_as(
                r#"INSERT INTO plugin_cells (plugin_id, cell_id, editor_id, created_at, updated_at)
                SELECT *, now(), now() FROM UNNEST($1::int[], $2::int[], $3::text[])
                ON CONFLICT (plugin_id, cell_id) DO UPDATE
                SET (editor_id, updated_at) = (EXCLUDED.editor_id, now())
                RETURNING *"#,
            )
            .bind(&plugin_ids)
            .bind(&cell_ids)
            .bind(&editor_ids)
            .fetch_all(pool)
            .await
            .context("Failed to insert plugin_cells")?,
        );
    }
    Ok(saved_plugin_cells)
}

View File

@@ -3,6 +3,8 @@ use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use super::BATCH_SIZE;
#[derive(Debug, Serialize, Deserialize)]
pub struct PluginWorld {
pub id: i32,
@@ -13,6 +15,13 @@ pub struct PluginWorld {
pub created_at: NaiveDateTime,
}
/// A plugin↔world association that has not yet been written to the database.
///
/// Mirrors `PluginWorld` minus the database-generated columns; consumed by
/// `batched_insert`.
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsavedPluginWorld {
pub plugin_id: i32,
pub world_id: i32,
pub editor_id: String,
}
#[instrument(level = "debug", skip(pool))]
pub async fn insert(
pool: &sqlx::Pool<sqlx::Postgres>,
@@ -36,3 +45,38 @@ pub async fn insert(
.await
.context("Failed to insert plugin_world")
}
#[instrument(level = "debug", skip(pool))]
/// Upserts `plugin_worlds` in chunks of [`BATCH_SIZE`], returning the saved rows.
///
/// Each chunk is decomposed into one column vector per bind parameter,
/// because the Postgres `UNNEST(...)` form takes parallel arrays.
///
/// # Errors
///
/// Returns an error if any batch insert fails; earlier batches are NOT
/// rolled back (each `fetch_all` is its own statement).
pub async fn batched_insert(
    pool: &sqlx::Pool<sqlx::Postgres>,
    plugin_worlds: &[UnsavedPluginWorld],
) -> Result<Vec<PluginWorld>> {
    let mut saved_plugin_worlds = Vec::with_capacity(plugin_worlds.len());
    for batch in plugin_worlds.chunks(BATCH_SIZE) {
        // Preallocate the per-column bind arrays to the batch size.
        let mut plugin_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut world_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut editor_ids: Vec<String> = Vec::with_capacity(batch.len());
        for unsaved_plugin_world in batch {
            plugin_ids.push(unsaved_plugin_world.plugin_id);
            world_ids.push(unsaved_plugin_world.world_id);
            editor_ids.push(unsaved_plugin_world.editor_id.clone());
        }
        saved_plugin_worlds.extend(
            sqlx::query_as!(
                PluginWorld,
                r#"INSERT INTO plugin_worlds (plugin_id, world_id, editor_id, created_at, updated_at)
                SELECT *, now(), now() FROM UNNEST($1::int[], $2::int[], $3::text[])
                ON CONFLICT (plugin_id, world_id) DO UPDATE
                SET (editor_id, updated_at) = (EXCLUDED.editor_id, now())
                RETURNING *"#,
                &plugin_ids,
                &world_ids,
                &editor_ids,
            )
            .fetch_all(pool)
            .await
            .context("Failed to insert plugin_worlds")?,
        );
    }
    Ok(saved_plugin_worlds)
}

View File

@@ -3,6 +3,8 @@ use chrono::NaiveDateTime;
use serde::{Deserialize, Serialize};
use tracing::instrument;
use super::BATCH_SIZE;
#[derive(Debug, Serialize, Deserialize)]
pub struct World {
pub id: i32,
@@ -12,6 +14,12 @@ pub struct World {
pub created_at: NaiveDateTime,
}
/// A world that has not yet been written to the database.
///
/// Mirrors `World` minus the database-generated columns; consumed by
/// `batched_insert`.
#[derive(Debug, Serialize, Deserialize)]
pub struct UnsavedWorld {
pub form_id: i32,
pub master: String,
}
#[instrument(level = "debug", skip(pool))]
pub async fn insert(
pool: &sqlx::Pool<sqlx::Postgres>,
@@ -33,3 +41,35 @@ pub async fn insert(
.await
.context("Failed to insert world")
}
#[instrument(level = "debug", skip(pool))]
/// Upserts `worlds` in chunks of [`BATCH_SIZE`], returning the saved rows.
///
/// Each chunk is decomposed into one column vector per bind parameter,
/// because the Postgres `UNNEST(...)` form takes parallel arrays.
///
/// # Errors
///
/// Returns an error if any batch insert fails; earlier batches are NOT
/// rolled back (each `fetch_all` is its own statement).
pub async fn batched_insert(
    pool: &sqlx::Pool<sqlx::Postgres>,
    worlds: &[UnsavedWorld],
) -> Result<Vec<World>> {
    let mut saved_worlds = Vec::with_capacity(worlds.len());
    for batch in worlds.chunks(BATCH_SIZE) {
        // Preallocate the per-column bind arrays to the batch size.
        let mut form_ids: Vec<i32> = Vec::with_capacity(batch.len());
        let mut masters: Vec<String> = Vec::with_capacity(batch.len());
        for unsaved_world in batch {
            form_ids.push(unsaved_world.form_id);
            masters.push(unsaved_world.master.clone());
        }
        saved_worlds.extend(
            sqlx::query_as!(
                World,
                r#"INSERT INTO worlds (form_id, master, created_at, updated_at)
                SELECT *, now(), now() FROM UNNEST($1::int[], $2::text[])
                ON CONFLICT (form_id, master) DO UPDATE
                SET updated_at = now()
                RETURNING *"#,
                &form_ids,
                &masters
            )
            .fetch_all(pool)
            .await
            .context("Failed to insert worlds")?,
        );
    }
    Ok(saved_worlds)
}