Merge branch 'xMAC94x/updateSQL' into 'master'

Update SQLite

See merge request veloren/veloren!3862
Marcel 2023-05-04 20:55:15 +00:00
commit 32d8d25124
9 changed files with 75 additions and 78 deletions
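
In short, this merge drops the Veloren fork of refinery (0.5.0, pulled from gitlab.com/veloren/refinery) in favour of the upstream 0.8.8 release and bumps rusqlite from 0.24.2 to 0.28.0, with hashlink and libsqlite3-sys updated alongside. Most of the source changes follow from rusqlite removing NO_PARAMS: statements now accept anything implementing Params, so empty parameter lists become [] and &[...] slices become plain arrays. A minimal sketch of the new call style against a throwaway in-memory table (not Veloren code):

    use rusqlite::{Connection, ToSql};

    fn main() -> rusqlite::Result<()> {
        let conn = Connection::open_in_memory()?;
        // Empty parameter lists are written as `[]` now that NO_PARAMS is gone.
        conn.execute("CREATE TABLE character (character_id INTEGER, alias TEXT)", [])?;
        // Mixed-type parameters bind as an array of `&dyn ToSql`, the pattern used
        // throughout this diff.
        conn.execute(
            "INSERT INTO character (character_id, alias) VALUES (?1, ?2)",
            [&1_i64 as &dyn ToSql, &"alice"],
        )?;
        // Homogeneous parameters can be passed as a plain array; no `&[...]` slice needed.
        let alias: String = conn.query_row(
            "SELECT alias FROM character WHERE character_id = ?1",
            [1_i64],
            |row| row.get(0),
        )?;
        assert_eq!(alias, "alice");
        Ok(())
    }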

Cargo.lock (generated)

@ -2718,11 +2718,11 @@ dependencies = [
[[package]]
name = "hashlink"
version = "0.6.0"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d99cf782f0dc4372d26846bec3de7804ceb5df083c2d4462c0b8d2330e894fa8"
checksum = "69fe1fcf8b4278d860ad0548329f892a3631fb63f82574df68275f34cdbe0ffa"
dependencies = [
"hashbrown 0.9.1",
"hashbrown 0.12.3",
]
[[package]]
@ -3337,9 +3337,9 @@ dependencies = [
[[package]]
name = "libsqlite3-sys"
version = "0.20.1"
version = "0.25.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64d31059f22935e6c31830db5249ba2b7ecd54fd73a9909286f0a67aa55c2fbd"
checksum = "29f835d03d717946d28b1d1ed632eb6f0e24a299388ee623d0c23118d3e8a7fa"
dependencies = [
"cc",
"pkg-config",
@ -5072,8 +5072,9 @@ dependencies = [
[[package]]
name = "refinery"
version = "0.5.0"
source = "git+https://gitlab.com/veloren/refinery.git?rev=8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e#8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6762a07453650132443ac393c5bf4836f790e0d96958507cfd05d560f05034a9"
dependencies = [
"refinery-core",
"refinery-macros",
@ -5081,12 +5082,12 @@ dependencies = [
[[package]]
name = "refinery-core"
version = "0.5.0"
source = "git+https://gitlab.com/veloren/refinery.git?rev=8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e#8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6eb938c73230daa3fbd5ed56f5638068df0e444897e1fd94f30103b0bbc52c00"
dependencies = [
"async-trait",
"cfg-if 1.0.0",
"chrono",
"lazy_static",
"log",
"regex",
@ -5094,21 +5095,23 @@ dependencies = [
"serde",
"siphasher",
"thiserror",
"toml 0.5.11",
"time 0.3.20",
"toml 0.7.3",
"url",
"walkdir 2.3.3",
]
[[package]]
name = "refinery-macros"
version = "0.5.0"
source = "git+https://gitlab.com/veloren/refinery.git?rev=8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e#8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e"
version = "0.8.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b95c4a9354004140b5ac1d3606cbb71c86107887f09d54955f12f97a637c041"
dependencies = [
"proc-macro2 1.0.56",
"quote 1.0.26",
"refinery-core",
"regex",
"syn 1.0.109",
"syn 2.0.15",
]
[[package]]
@ -5323,17 +5326,15 @@ dependencies = [
[[package]]
name = "rusqlite"
version = "0.24.2"
version = "0.28.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5f38ee71cbab2c827ec0ac24e76f82eca723cee92c509a65f67dee393c25112"
checksum = "01e213bc3ecb39ac32e81e51ebe31fd888a940515173e3a18a35f8c6e896422a"
dependencies = [
"bitflags 1.3.2",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"lazy_static",
"libsqlite3-sys",
"memchr",
"smallvec",
]


@ -72,6 +72,7 @@ where
"wgpu_core::swap_chain=info",
"veloren_network_protocol=info",
"quinn_proto::connection=info",
"refinery_core::traits::divergent=off",
"veloren_server::persistence::character=info",
"veloren_server::settings=info",
];
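
The new filter entry above is a tracing EnvFilter-style directive (target=level); "off" silences a target entirely, here presumably to mute refinery_core's logging about divergent migrations after the switch to upstream refinery. A minimal sketch of how such directives combine into a filter, not Veloren's actual logging setup:

    use tracing_subscriber::EnvFilter;

    // Hypothetical helper: fold target=level directives into a single EnvFilter,
    // starting from a default level of info.
    fn build_filter(directives: &[&str]) -> EnvFilter {
        directives.iter().fold(EnvFilter::new("info"), |filter, d| {
            filter.add_directive(d.parse().expect("invalid tracing directive"))
        })
    }

    // build_filter(&["refinery_core::traits::divergent=off", "veloren_server::settings=info"]);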


@ -68,8 +68,8 @@ enum-map = "2.4"
noise = { version = "0.7", default-features = false }
censor = "0.2"
rusqlite = { version = "0.24.2", features = ["array", "vtab", "bundled", "trace"] }
refinery = { git = "https://gitlab.com/veloren/refinery.git", rev = "8ecf4b4772d791e6c8c0a3f9b66a7530fad1af3e", features = ["rusqlite"] }
rusqlite = { version = "0.28.0", features = ["array", "vtab", "bundled", "trace"] }
refinery = { version = "0.8.8", features = ["rusqlite"] }
# Plugins
plugin-api = { package = "veloren-plugin-api", path = "../plugin/api"}
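
With refinery back on crates.io, migrations run through the stock 0.8 embedded-migrations API instead of the patched fork. A minimal sketch of how that API is typically used with the rusqlite feature; the ./migrations path and the wrapper function are illustrative assumptions, not Veloren's actual layout:

    use refinery::embed_migrations;
    use rusqlite::Connection;

    // Embeds any V{version}__{name}.sql files found in the directory at compile
    // time and generates a `migrations` module with a runner.
    embed_migrations!("./migrations");

    fn run_migrations(conn: &mut Connection) -> Result<(), refinery::Error> {
        // With the rusqlite feature the runner accepts a rusqlite connection
        // directly and records applied versions in its own bookkeeping table.
        let report = migrations::runner().run(conn)?;
        println!("applied {} migrations", report.applied_migrations().len());
        Ok(())
    }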


@ -30,7 +30,7 @@ use common::{
event::UpdateCharacterMetadata,
};
use core::ops::Range;
use rusqlite::{types::Value, Connection, ToSql, Transaction, NO_PARAMS};
use rusqlite::{types::Value, Connection, ToSql, Transaction};
use std::{num::NonZeroU64, rc::Rc};
use tracing::{debug, error, trace, warn};
@ -96,7 +96,7 @@ pub fn load_items(connection: &Connection, root: i64) -> Result<Vec<Item>, Persi
)?;
let items = stmt
.query_map(&[root], |row| {
.query_map([root], |row| {
Ok(Item {
item_id: row.get(0)?,
parent_container_item_id: row.get(1)?,
@ -139,7 +139,7 @@ pub fn load_character_data(
)?;
let (body_data, character_data) = stmt.query_row(
&[requesting_player_uuid.clone(), char_id.0.to_string()],
[requesting_player_uuid.clone(), char_id.0.to_string()],
|row| {
let character_data = Character {
character_id: row.get(0)?,
@ -187,7 +187,7 @@ pub fn load_character_data(
)?;
let skill_group_data = stmt
.query_map(&[char_id.0], |row| {
.query_map([char_id.0], |row| {
Ok(SkillGroup {
entity_id: char_id.0,
skill_group_kind: row.get(0)?,
@ -212,7 +212,7 @@ pub fn load_character_data(
)?;
let db_pets = stmt
.query_map(&[char_id.0], |row| {
.query_map([char_id.0], |row| {
Ok(Pet {
database_id: row.get(0)?,
name: row.get(1)?,
@ -254,7 +254,7 @@ pub fn load_character_data(
WHERE entity_id = ?1",
)?;
let ability_set_data = stmt.query_row(&[char_id.0], |row| {
let ability_set_data = stmt.query_row([char_id.0], |row| {
Ok(AbilitySets {
entity_id: char_id.0,
ability_sets: row.get(0)?,
@ -304,7 +304,7 @@ pub fn load_character_list(player_uuid_: &str, connection: &Connection) -> Chara
)?;
let characters = stmt
.query_map(&[player_uuid_], |row| {
.query_map([player_uuid_], |row| {
Ok(Character {
character_id: row.get(0)?,
alias: row.get(1)?,
@ -329,7 +329,7 @@ pub fn load_character_list(player_uuid_: &str, connection: &Connection) -> Chara
FROM body
WHERE body_id = ?1",
)?;
let db_body = stmt.query_row(&[char.id.map(|c| c.0)], |row| {
let db_body = stmt.query_row([char.id.map(|c| c.0)], |row| {
Ok(Body {
body_id: row.get(0)?,
variant: row.get(1)?,
@ -426,7 +426,7 @@ pub fn create_character(
)?;
for pseudo_container in pseudo_containers {
stmt.execute(&[
stmt.execute([
&pseudo_container.item_id as &dyn ToSql,
&pseudo_container.parent_container_item_id,
&pseudo_container.item_definition_id,
@ -446,7 +446,7 @@ pub fn create_character(
)?;
let (body_variant, body_json) = convert_body_to_database_json(&body)?;
stmt.execute(&[
stmt.execute([
&character_id as &dyn ToSql,
&body_variant.to_string(),
&body_json,
@ -462,7 +462,7 @@ pub fn create_character(
VALUES (?1, ?2, ?3, ?4)",
)?;
stmt.execute(&[
stmt.execute([
&character_id as &dyn ToSql,
&uuid,
&character_alias,
@ -485,7 +485,7 @@ pub fn create_character(
)?;
for skill_group in db_skill_groups {
stmt.execute(&[
stmt.execute([
&character_id as &dyn ToSql,
&skill_group.skill_group_kind,
&skill_group.earned_exp,
@ -506,7 +506,7 @@ pub fn create_character(
VALUES (?1, ?2)",
)?;
stmt.execute(&[
stmt.execute([
&character_id as &dyn ToSql,
&ability_sets.ability_sets as &dyn ToSql,
])?;
@ -538,7 +538,7 @@ pub fn create_character(
)?;
for item in inserts {
stmt.execute(&[
stmt.execute([
&item.model.item_id as &dyn ToSql,
&item.model.parent_container_item_id,
&item.model.item_definition_id,
@ -586,7 +586,7 @@ pub fn edit_character(
.prepare_cached("UPDATE body SET variant = ?1, body_data = ?2 WHERE body_id = ?3")?;
let (body_variant, body_data) = convert_body_to_database_json(&body)?;
stmt.execute(&[
stmt.execute([
&body_variant.to_string(),
&body_data,
&character_id.0 as &dyn ToSql,
@ -596,7 +596,7 @@ pub fn edit_character(
let mut stmt =
transaction.prepare_cached("UPDATE character SET alias = ?1 WHERE character_id = ?2")?;
stmt.execute(&[&character_alias, &character_id.0 as &dyn ToSql])?;
stmt.execute([&character_alias, &character_id.0 as &dyn ToSql])?;
drop(stmt);
char_list.map(|list| (character_id, list))
@ -618,13 +618,10 @@ pub fn delete_character(
AND player_uuid = ?2",
)?;
let result = stmt.query_row(
&[&char_id.0 as &dyn ToSql, &requesting_player_uuid],
|row| {
let result = stmt.query_row([&char_id.0 as &dyn ToSql, &requesting_player_uuid], |row| {
let y: i64 = row.get(0)?;
Ok(y)
},
)?;
})?;
drop(stmt);
if result != 1 {
@ -641,7 +638,7 @@ pub fn delete_character(
WHERE entity_id = ?1",
)?;
stmt.execute(&[&char_id.0])?;
stmt.execute([&char_id.0])?;
drop(stmt);
let pet_ids = get_pet_ids(char_id, transaction)?
@ -660,7 +657,7 @@ pub fn delete_character(
WHERE entity_id = ?1",
)?;
stmt.execute(&[&char_id.0])?;
stmt.execute([&char_id.0])?;
drop(stmt);
// Delete character
@ -671,7 +668,7 @@ pub fn delete_character(
WHERE character_id = ?1",
)?;
stmt.execute(&[&char_id.0])?;
stmt.execute([&char_id.0])?;
drop(stmt);
// Delete body
@ -682,7 +679,7 @@ pub fn delete_character(
WHERE body_id = ?1",
)?;
stmt.execute(&[&char_id.0])?;
stmt.execute([&char_id.0])?;
drop(stmt);
// Delete all items, recursively walking all containers starting from the
@ -706,7 +703,7 @@ pub fn delete_character(
WHERE EXISTS (SELECT 1 FROM parents WHERE parents.item_id = item.item_id)",
)?;
let deleted_item_count = stmt.execute(&[&char_id.0])?;
let deleted_item_count = stmt.execute([&char_id.0])?;
drop(stmt);
if deleted_item_count < 3 {
@ -734,7 +731,7 @@ pub fn check_character_limit(
)?;
#[allow(clippy::needless_question_mark)]
let character_count: i64 = stmt.query_row(&[&uuid], |row| Ok(row.get(0)?))?;
let character_count: i64 = stmt.query_row([&uuid], |row| Ok(row.get(0)?))?;
drop(stmt);
if character_count < MAX_CHARACTERS_PER_PLAYER as i64 {
@ -765,7 +762,7 @@ fn get_new_entity_ids(
)?;
#[allow(clippy::needless_question_mark)]
let next_entity_id = stmt.query_row(NO_PARAMS, |row| Ok(row.get(0)?))?;
let next_entity_id = stmt.query_row([], |row| Ok(row.get(0)?))?;
let max_entity_id = max(next_entity_id);
// Create a new range of IDs and insert them into the entity table
@ -775,7 +772,7 @@ fn get_new_entity_ids(
// SQLite has no bulk insert
for i in new_ids.clone() {
stmt.execute(&[i])?;
stmt.execute([i])?;
}
trace!(
@ -826,7 +823,7 @@ fn get_pseudo_container_id(
#[allow(clippy::needless_question_mark)]
let res = stmt.query_row(
&[
[
character_id.0.to_string(),
pseudo_container_position.to_string(),
],
@ -896,7 +893,7 @@ fn update_pets(
VALUES (?1, ?2, ?3)"
)?;
stmt.execute(&[
stmt.execute([
&pet_entity_id as &dyn ToSql,
&body_variant.to_string(),
&body_json,
@ -912,7 +909,7 @@ fn update_pets(
VALUES (?1, ?2, ?3)",
)?;
stmt.execute(&[&pet_entity_id as &dyn ToSql, &char_id.0, &stats.name])?;
stmt.execute([&pet_entity_id as &dyn ToSql, &char_id.0, &stats.name])?;
drop(stmt);
pet.get_database_id()
@ -935,7 +932,7 @@ fn get_pet_ids(
#[allow(clippy::needless_question_mark)]
let db_pets = stmt
.query_map(&[&char_id.0], |row| Ok(row.get(0)?))?
.query_map([&char_id.0], |row| Ok(row.get(0)?))?
.map(|x| x.unwrap())
.collect::<Vec<i64>>();
drop(stmt);
@ -954,7 +951,7 @@ fn delete_pets(
WHERE pet_id IN rarray(?1)"
)?;
let delete_count = stmt.execute(&[&pet_ids])?;
let delete_count = stmt.execute([&pet_ids])?;
drop(stmt);
debug!(
"Deleted {} pets for character id {}",
@ -968,7 +965,7 @@ fn delete_pets(
WHERE body_id IN rarray(?1)"
)?;
let delete_count = stmt.execute(&[&pet_ids])?;
let delete_count = stmt.execute([&pet_ids])?;
debug!(
"Deleted {} pet bodies for character id {}",
delete_count, char_id.0
@ -1032,7 +1029,7 @@ pub fn update(
IN rarray(?1)
AND item_id NOT IN rarray(?2)",
)?;
let delete_count = stmt.execute(&[Rc::new(existing_item_ids), Rc::new(non_upserted_items)])?;
let delete_count = stmt.execute([Rc::new(existing_item_ids), Rc::new(non_upserted_items)])?;
trace!("Deleted {} items", delete_count);
// Upsert items
@ -1076,7 +1073,7 @@ pub fn update(
)?;
for item in upserted_items.iter() {
stmt.execute(&[
stmt.execute([
&item.item_id as &dyn ToSql,
&item.parent_container_item_id,
&item.item_definition_id,
@ -1102,7 +1099,7 @@ pub fn update(
)?;
for skill_group in db_skill_groups {
stmt.execute(&[
stmt.execute([
&skill_group.entity_id as &dyn ToSql,
&skill_group.skill_group_kind,
&skill_group.earned_exp,
@ -1122,7 +1119,7 @@ pub fn update(
",
)?;
let waypoint_count = stmt.execute(&[&db_waypoint as &dyn ToSql, &char_id.0])?;
let waypoint_count = stmt.execute([&db_waypoint as &dyn ToSql, &char_id.0])?;
if waypoint_count != 1 {
return Err(PersistenceError::OtherError(format!(
@ -1141,7 +1138,7 @@ pub fn update(
",
)?;
let ability_sets_count = stmt.execute(&[
let ability_sets_count = stmt.execute([
&ability_sets.ability_sets as &dyn ToSql,
&char_id.0 as &dyn ToSql,
])?;
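
The IN rarray(?1) deletes above depend on rusqlite's "array" feature (enabled in Cargo.toml), which provides a table-valued rarray() function and lets an Rc<Vec<Value>> be bound as a single parameter. A self-contained sketch of the pattern with a hypothetical item table:

    use std::rc::Rc;

    use rusqlite::{types::Value, Connection};

    fn delete_items(conn: &Connection, ids: Vec<i64>) -> rusqlite::Result<usize> {
        // The rarray() virtual table module has to be registered on the connection
        // before it can be referenced in SQL.
        rusqlite::vtab::array::load_module(conn)?;
        // Under the array feature, Rc<Vec<Value>> implements ToSql, so the whole
        // id list binds as one parameter.
        let ids: Rc<Vec<Value>> = Rc::new(ids.into_iter().map(Value::from).collect());
        conn.execute("DELETE FROM item WHERE item_id IN rarray(?1)", [ids])
    }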


@ -1,5 +1,4 @@
use crate::persistence::{error::PersistenceError, VelorenConnection};
use rusqlite::NO_PARAMS;
use tracing::{debug, info};
/// Performs a one-time migration from diesel to refinery migrations. Copies
@ -23,7 +22,7 @@ pub(crate) fn migrate_from_diesel(
",
)?;
let diesel_migrations_table_exists = stmt.query_row(NO_PARAMS, |row| {
let diesel_migrations_table_exists = stmt.query_row([], |row| {
let row_count: i32 = row.get(0)?;
Ok(row_count > 0)
})?;


@ -177,8 +177,7 @@ pub fn run_migrations(settings: &DatabaseSettings) {
pub fn vacuum_database(settings: &DatabaseSettings) {
let conn = establish_connection(settings, ConnectionMode::ReadWrite);
// The params type is phony; it's required, but not meaningful.
conn.execute::<&[u32]>("VACUUM main", &[])
conn.execute("VACUUM main", [])
.expect("Database vacuuming failed, server startup aborted");
info!("Database vacuumed");
@ -234,13 +233,13 @@ pub(crate) fn establish_connection(
// Use Write-Ahead-Logging for improved concurrency: https://sqlite.org/wal.html
// Set a busy timeout (in ms): https://sqlite.org/c3ref/busy_timeout.html
connection
.pragma_update(None, "foreign_keys", &"ON")
.pragma_update(None, "foreign_keys", "ON")
.expect("Failed to set foreign_keys PRAGMA");
connection
.pragma_update(None, "journal_mode", &"WAL")
.pragma_update(None, "journal_mode", "WAL")
.expect("Failed to set journal_mode PRAGMA");
connection
.pragma_update(None, "busy_timeout", &"250")
.pragma_update(None, "busy_timeout", "250")
.expect("Failed to set busy_timeout PRAGMA");
veloren_connection
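
The PRAGMA and VACUUM changes above track two smaller rusqlite 0.28 differences: the pragma value no longer needs an explicit borrow, and execute takes any Params, so the old execute::<&[u32]> type annotation disappears. A minimal sketch with the same settings as the diff (values copied from it, not a recommendation):

    use rusqlite::Connection;

    fn tune_connection(conn: &Connection) -> rusqlite::Result<()> {
        // Enforce foreign keys, use write-ahead logging, and wait up to 250 ms on
        // a locked database instead of failing immediately.
        conn.pragma_update(None, "foreign_keys", "ON")?;
        conn.pragma_update(None, "journal_mode", "WAL")?;
        conn.pragma_update(None, "busy_timeout", "250")?;
        // Parameterless statements take the empty [] params value.
        conn.execute("VACUUM main", [])?;
        Ok(())
    }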


@ -59,7 +59,7 @@ criterion = "0.3"
csv = "1.1.3"
tracing-subscriber = { version = "0.3.7", default-features = false, features = ["fmt", "time", "ansi", "smallvec", "env-filter"] }
minifb = "0.23"
rusqlite = { version = "0.24.2", features = ["array", "vtab", "bundled", "trace"] }
rusqlite = { version = "0.28.0", features = ["array", "vtab", "bundled", "trace"] }
svg_fmt = "0.4"
structopt = "0.3"
strum = "0.24"


@ -128,7 +128,7 @@ fn economy_sqlite(world: &World, index: &Index) -> Result<(), Box<dyn Error>> {
.prepare("REPLACE INTO site_price (xcoord, ycoord, good, price) VALUES (?1, ?2, ?3, ?4)")?;
let mut insert_price = move |center: Vec2<i32>, good: Good, prices: &SitePrices| {
let price = prices.values.get(&good).unwrap_or(&0.0);
insert_price_stmt.execute(&[
insert_price_stmt.execute([
&center.x as &dyn ToSql,
&center.y,
&format!("{:?}", good),
@ -142,7 +142,7 @@ fn economy_sqlite(world: &World, index: &Index) -> Result<(), Box<dyn Error>> {
let prices = site.economy.get_site_prices();
conn.execute(
"REPLACE INTO site (xcoord, ycoord, name) VALUES (?1, ?2, ?3)",
&[
[
&civsite.center.x as &dyn ToSql,
&civsite.center.y,
&site.name(),


@ -9,7 +9,7 @@ use rayon::{
iter::{IntoParallelIterator, ParallelIterator},
ThreadPoolBuilder,
};
use rusqlite::{Connection, ToSql, Transaction, TransactionBehavior, NO_PARAMS};
use rusqlite::{Connection, ToSql, Transaction, TransactionBehavior};
use std::{
collections::{HashMap, HashSet},
error::Error,
@ -77,7 +77,7 @@ fn generate(db_path: &str, ymin: Option<i32>, ymax: Option<i32>) -> Result<(), B
let existing_chunks: HashSet<(i32, i32)> = conn
.prepare("SELECT xcoord, ycoord FROM chunk")?
.query(NO_PARAMS)?
.query([])?
.map(|row| Ok((row.get(0)?, row.get(1)?)))
.collect()?;
@ -159,7 +159,7 @@ fn generate(db_path: &str, ymin: Option<i32>, ymax: Option<i32>) -> Result<(), B
")?;
println!("Inserting results for chunk at ({}, {}): {}", x, y, i);
for ((kind, color), count) in block_counts.iter() {
insert_block.execute(&[
insert_block.execute([
&x as &dyn ToSql,
&y,
&format!("{:?}", kind),
@ -170,11 +170,11 @@ fn generate(db_path: &str, ymin: Option<i32>, ymax: Option<i32>) -> Result<(), B
])?;
}
for (kind, count) in sprite_counts.iter() {
insert_sprite.execute(&[&x as &dyn ToSql, &y, &format!("{:?}", kind), &count])?;
insert_sprite.execute([&x as &dyn ToSql, &y, &format!("{:?}", kind), &count])?;
}
let start_time = start_time.duration_since(UNIX_EPOCH)?.as_secs_f64();
let end_time = end_time.duration_since(UNIX_EPOCH)?.as_secs_f64();
insert_chunk.execute(&[&x as &dyn ToSql, &y, &height, &start_time, &end_time])?;
insert_chunk.execute([&x as &dyn ToSql, &y, &height, &start_time, &end_time])?;
if i % 32 == 0 {
println!("Committing last 32 chunks");
drop(insert_block);
@ -193,7 +193,7 @@ fn palette(conn: Connection) -> Result<(), Box<dyn Error>> {
conn.prepare("SELECT kind, r, g, b, SUM(quantity) FROM block GROUP BY kind, r, g, b")?;
let mut block_colors: HashMap<BlockKind, Vec<(Rgb<u8>, i64)>> = HashMap::new();
let mut rows = stmt.query(NO_PARAMS)?;
let mut rows = stmt.query([])?;
while let Some(row) = rows.next()? {
let kind = BlockKind::from_str(&row.get::<_, String>(0)?)?;
let rgb: Rgb<u8> = Rgb::new(row.get(1)?, row.get(2)?, row.get(3)?);
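
For completeness, the parameterless query([]) calls above return a Rows cursor that can also be walked directly; an equivalent sketch of the chunk lookup using a plain while-let loop (the chunk table comes from the diff, the function name is illustrative):

    use std::collections::HashSet;

    use rusqlite::Connection;

    fn existing_chunks(conn: &Connection) -> rusqlite::Result<HashSet<(i32, i32)>> {
        let mut stmt = conn.prepare("SELECT xcoord, ycoord FROM chunk")?;
        // No parameters: [] replaces the removed NO_PARAMS constant.
        let mut rows = stmt.query([])?;
        let mut chunks = HashSet::new();
        while let Some(row) = rows.next()? {
            chunks.insert((row.get(0)?, row.get(1)?));
        }
        Ok(chunks)
    }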