2020-06-16 01:00:32 +00:00
|
|
|
//! Database operations related to character data
|
|
|
|
//!
|
2020-09-17 23:02:14 +00:00
|
|
|
//! Methods in this module should remain private to the persistence module -
|
|
|
|
//! database updates and loading are communicated via requests to the
|
|
|
|
//! [`CharacterLoader`] and [`CharacterUpdater`] while results/responses are
|
|
|
|
//! polled and handled each server tick.
|
2020-05-09 15:41:25 +00:00
|
|
|
extern crate diesel;
|
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
use super::{error::Error, models::*, schema, VelorenTransaction};
|
|
|
|
use crate::{
|
|
|
|
comp,
|
2021-02-23 20:29:27 +00:00
|
|
|
comp::{item::MaterialStatManifest, Inventory},
|
2020-09-17 23:02:14 +00:00
|
|
|
persistence::{
|
|
|
|
character::conversions::{
|
|
|
|
convert_body_from_database, convert_body_to_database_json,
|
|
|
|
convert_character_from_database, convert_inventory_from_database_items,
|
|
|
|
convert_items_to_database_items, convert_loadout_from_database_items,
|
2020-12-13 17:21:51 +00:00
|
|
|
convert_skill_groups_to_database, convert_skills_to_database,
|
2021-01-01 22:39:36 +00:00
|
|
|
convert_stats_from_database, convert_waypoint_from_database_json,
|
|
|
|
convert_waypoint_to_database_json,
|
2020-09-17 23:02:14 +00:00
|
|
|
},
|
2020-11-14 19:32:39 +00:00
|
|
|
character_loader::{CharacterCreationResult, CharacterDataResult, CharacterListResult},
|
2020-09-17 23:02:14 +00:00
|
|
|
error::Error::DatabaseError,
|
|
|
|
PersistedComponents,
|
2020-06-01 21:34:52 +00:00
|
|
|
},
|
2020-05-09 15:41:25 +00:00
|
|
|
};
|
2021-01-08 19:12:09 +00:00
|
|
|
use common::character::{CharacterId, CharacterItem, MAX_CHARACTERS_PER_PLAYER};
|
2020-09-17 23:02:14 +00:00
|
|
|
use core::ops::Range;
|
|
|
|
use diesel::{prelude::*, sql_query, sql_types::BigInt};
|
2021-02-18 22:22:15 +00:00
|
|
|
use std::{collections::VecDeque, sync::Arc};
|
2020-11-16 18:49:00 +00:00
|
|
|
use tracing::{error, trace, warn};
|
2020-09-17 23:02:14 +00:00
|
|
|
|
|
|
|
/// Private module for very tightly coupled database conversion methods. In
/// general, these have many invariants that need to be maintained when they're
/// called--do not assume it's safe to make these public!
mod conversions;

/// Database identifier type for rows in the `entity` table.
pub(crate) type EntityId = i64;

// Item-definition IDs for the pseudo-container items that act as roots for a
// character's owned items in the `item` table.
const CHARACTER_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.character";
const INVENTORY_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.inventory";
const LOADOUT_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.loadout";

// `position` values used to locate the inventory/loadout pseudo-containers
// underneath a character's own pseudo-container row.
const INVENTORY_PSEUDO_CONTAINER_POSITION: &str = "inventory";
const LOADOUT_PSEUDO_CONTAINER_POSITION: &str = "loadout";

// Parent container ID assigned to every character pseudo-container row.
const WORLD_PSEUDO_CONTAINER_ID: EntityId = 1;

/// Entity IDs of the inventory and loadout pseudo-containers belonging to a
/// single character.
#[derive(Clone, Copy)]
struct CharacterContainers {
    inventory_container_id: EntityId,
    loadout_container_id: EntityId,
}
|
|
|
|
|
2021-02-18 22:22:15 +00:00
|
|
|
/// BFS the inventory/loadout to ensure that each is topologically sorted in the
|
|
|
|
/// sense required by convert_inventory_from_database_items to support recursive
|
|
|
|
/// items
|
|
|
|
pub fn load_items_bfs(connection: VelorenTransaction, root: i64) -> Result<Vec<Item>, Error> {
|
|
|
|
use schema::item::dsl::*;
|
|
|
|
let mut items = Vec::new();
|
|
|
|
let mut queue = VecDeque::new();
|
|
|
|
queue.push_front(root);
|
|
|
|
while let Some(id) = queue.pop_front() {
|
|
|
|
let frontier = item
|
|
|
|
.filter(parent_container_item_id.eq(id))
|
|
|
|
.load::<Item>(&*connection)?;
|
|
|
|
for i in frontier.iter() {
|
|
|
|
queue.push_back(i.item_id);
|
|
|
|
}
|
|
|
|
items.extend(frontier);
|
|
|
|
}
|
|
|
|
Ok(items)
|
|
|
|
}
|
|
|
|
|
2020-05-11 10:06:53 +00:00
|
|
|
/// Load stored data for a character.
///
/// After first logging in, and after a character is selected, we fetch this
/// data for the purpose of inserting their persisted data for the entity.
///
/// Verifies ownership by requiring the character row to match both `char_id`
/// and `requesting_player_uuid`; a missing row surfaces as a diesel `NotFound`
/// error through `?`.
pub fn load_character_data(
    requesting_player_uuid: String,
    char_id: CharacterId,
    connection: VelorenTransaction,
    msm: &MaterialStatManifest,
) -> CharacterDataResult {
    use schema::{body::dsl::*, character::dsl::*, skill_group::dsl::*};

    // Resolve the character's inventory/loadout pseudo-container IDs first;
    // all item rows hang off these containers.
    let character_containers = get_pseudo_containers(connection, char_id)?;

    // Items are loaded breadth-first so parents precede children, as required
    // by convert_inventory_from_database_items for nested containers.
    let inventory_items = load_items_bfs(connection, character_containers.inventory_container_id)?;
    let loadout_items = load_items_bfs(connection, character_containers.loadout_container_id)?;

    let character_data = character
        .filter(
            schema::character::dsl::character_id
                .eq(char_id)
                .and(player_uuid.eq(requesting_player_uuid)),
        )
        .first::<Character>(&*connection)?;

    // The body row shares the character's ID as its primary key.
    let char_body = body
        .filter(schema::body::dsl::body_id.eq(char_id))
        .first::<Body>(&*connection)?;

    // A corrupt/unreadable waypoint is logged and dropped rather than failing
    // the whole load - the character simply spawns without a waypoint.
    let char_waypoint = character_data.waypoint.as_ref().and_then(|x| {
        match convert_waypoint_from_database_json(&x) {
            Ok(w) => Some(w),
            Err(e) => {
                warn!(
                    "Error reading waypoint from database for character ID {}, error: {}",
                    char_id, e
                );
                None
            },
        }
    });

    let skill_data = schema::skill::dsl::skill
        .filter(schema::skill::dsl::entity_id.eq(char_id))
        .load::<Skill>(&*connection)?;

    let skill_group_data = skill_group
        .filter(schema::skill_group::dsl::entity_id.eq(char_id))
        .load::<SkillGroup>(&*connection)?;

    // Convert the raw rows into the (body, stats, inventory, waypoint)
    // component tuple expected by the caller.
    Ok((
        convert_body_from_database(&char_body)?,
        convert_stats_from_database(character_data.alias, &skill_data, &skill_group_data),
        convert_inventory_from_database_items(
            character_containers.inventory_container_id,
            &inventory_items,
            character_containers.loadout_container_id,
            &loadout_items,
            msm,
        )?,
        char_waypoint,
    ))
}
|
|
|
|
|
|
|
|
/// Loads a list of characters belonging to the player. This data is a small
/// subset of the character's data, and is used to render the character and
/// their level in the character list.
///
/// In the event that a join fails, for a character (i.e. they lack an entry for
/// stats, body, etc...) the character is skipped, and no entry will be
/// returned.
pub fn load_character_list(
    player_uuid_: &str,
    connection: VelorenTransaction,
    msm: &MaterialStatManifest,
) -> CharacterListResult {
    use schema::{body::dsl::*, character::dsl::*};

    // Newest characters first (descending character_id).
    let result = character
        .filter(player_uuid.eq(player_uuid_))
        .order(schema::character::dsl::character_id.desc())
        .load::<Character>(&*connection)?;

    // Each character is mapped to a Result; collect() short-circuits on the
    // first error, propagating it to the caller.
    result
        .iter()
        .map(|character_data| {
            let char = convert_character_from_database(character_data);

            let db_body = body
                .filter(schema::body::dsl::body_id.eq(character_data.character_id))
                .first::<Body>(&*connection)?;

            let char_body = convert_body_from_database(&db_body)?;

            // Only the loadout is needed for the list view - the full
            // inventory is not loaded here.
            let loadout_container_id = get_pseudo_container_id(
                connection,
                character_data.character_id,
                LOADOUT_PSEUDO_CONTAINER_POSITION,
            )?;

            let loadout_items = load_items_bfs(connection, loadout_container_id)?;

            let loadout =
                convert_loadout_from_database_items(loadout_container_id, &loadout_items, msm)?;

            Ok(CharacterItem {
                character: char,
                body: char_body,
                inventory: Inventory::new_with_loadout(loadout),
            })
        })
        .collect()
}
|
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
/// Creates a new character for the player, inserting its pseudo-containers,
/// body, character, skill-group, and default item records within the current
/// transaction. Returns the new character's ID together with the player's
/// updated character list.
///
/// Fails early with `Error::CharacterLimitReached` if the player already has
/// `MAX_CHARACTERS_PER_PLAYER` characters; each subsequent insert is checked
/// against its expected row count so a partial write aborts the transaction.
pub fn create_character(
    uuid: &str,
    character_alias: &str,
    persisted_components: PersistedComponents,
    connection: VelorenTransaction,
    msm: &MaterialStatManifest,
) -> CharacterCreationResult {
    use schema::item::dsl::*;

    check_character_limit(uuid, connection)?;

    use schema::{body, character, skill_group};

    let (body, stats, inventory, waypoint) = persisted_components;

    // Fetch new entity IDs for character, inventory and loadout
    let mut new_entity_ids = get_new_entity_ids(connection, |next_id| next_id + 3)?;

    // Create pseudo-container items for character
    // NOTE: unwrap() is safe here since the closure above reserved exactly
    // three IDs.
    let character_id = new_entity_ids.next().unwrap();
    let inventory_container_id = new_entity_ids.next().unwrap();
    let loadout_container_id = new_entity_ids.next().unwrap();

    let pseudo_containers = vec![
        // Character root container, parented to the world pseudo-container.
        Item {
            stack_size: 1,
            item_id: character_id,
            parent_container_item_id: WORLD_PSEUDO_CONTAINER_ID,
            item_definition_id: CHARACTER_PSEUDO_CONTAINER_DEF_ID.to_owned(),
            position: character_id.to_string(),
        },
        // Inventory container, parented to the character container.
        Item {
            stack_size: 1,
            item_id: inventory_container_id,
            parent_container_item_id: character_id,
            item_definition_id: INVENTORY_PSEUDO_CONTAINER_DEF_ID.to_owned(),
            position: INVENTORY_PSEUDO_CONTAINER_POSITION.to_owned(),
        },
        // Loadout container, parented to the character container.
        Item {
            stack_size: 1,
            item_id: loadout_container_id,
            parent_container_item_id: character_id,
            item_definition_id: LOADOUT_PSEUDO_CONTAINER_DEF_ID.to_owned(),
            position: LOADOUT_PSEUDO_CONTAINER_POSITION.to_owned(),
        },
    ];
    let pseudo_container_count = diesel::insert_into(item)
        .values(pseudo_containers)
        .execute(&*connection)?;

    if pseudo_container_count != 3 {
        return Err(Error::OtherError(format!(
            "Error inserting initial pseudo containers for character id {} (expected 3, actual {})",
            character_id, pseudo_container_count
        )));
    }

    let skill_set = stats.skill_set;

    // Insert body record
    let new_body = Body {
        body_id: character_id,
        body_data: convert_body_to_database_json(&body)?,
        variant: "humanoid".to_string(),
    };

    let body_count = diesel::insert_into(body::table)
        .values(&new_body)
        .execute(&*connection)?;

    if body_count != 1 {
        return Err(Error::OtherError(format!(
            "Error inserting into body table for char_id {}",
            character_id
        )));
    }

    // Insert character record
    let new_character = NewCharacter {
        character_id,
        player_uuid: uuid,
        alias: &character_alias,
        waypoint: convert_waypoint_to_database_json(waypoint),
    };
    let character_count = diesel::insert_into(character::table)
        .values(&new_character)
        .execute(&*connection)?;

    if character_count != 1 {
        return Err(Error::OtherError(format!(
            "Error inserting into character table for char_id {}",
            character_id
        )));
    }

    // NOTE(review): the != 1 check assumes a new character starts with exactly
    // one skill group - confirm against convert_skill_groups_to_database.
    let db_skill_groups = convert_skill_groups_to_database(character_id, skill_set.skill_groups);
    let skill_groups_count = diesel::insert_into(skill_group::table)
        .values(&db_skill_groups)
        .execute(&*connection)?;

    if skill_groups_count != 1 {
        return Err(Error::OtherError(format!(
            "Error inserting into skill_group table for char_id {}",
            character_id
        )));
    }

    // Insert default inventory and loadout item records
    let mut inserts = Vec::new();

    // Reserve entity IDs for however many items the conversion produces; the
    // closure returns the next unused ID so the reservation is exact.
    get_new_entity_ids(connection, |mut next_id| {
        let inserts_ = convert_items_to_database_items(
            loadout_container_id,
            &inventory,
            inventory_container_id,
            &mut next_id,
        );
        inserts = inserts_;
        next_id
    })?;

    let expected_inserted_count = inserts.len();
    let inserted_items = inserts
        .into_iter()
        .map(|item_pair| item_pair.model)
        .collect::<Vec<_>>();
    let inserted_count = diesel::insert_into(item)
        .values(&inserted_items)
        .execute(&*connection)?;

    if expected_inserted_count != inserted_count {
        return Err(Error::OtherError(format!(
            "Expected insertions={}, actual={}, for char_id {}--unsafe to continue transaction.",
            expected_inserted_count, inserted_count, character_id
        )));
    }

    // Return the character's ID alongside the refreshed character list.
    load_character_list(uuid, connection, msm).map(|list| (character_id, list))
}
|
|
|
|
|
2020-05-11 10:06:53 +00:00
|
|
|
/// Delete a character. Returns the updated character list.
///
/// Ownership is verified up front by loading the character row filtered on
/// both `char_id` and `requesting_player_uuid`; skills, the character row, the
/// body row, and finally the full item tree are then deleted in order.
pub fn delete_character(
    requesting_player_uuid: &str,
    char_id: CharacterId,
    connection: VelorenTransaction,
    msm: &MaterialStatManifest,
) -> CharacterListResult {
    use schema::{body::dsl::*, character::dsl::*, skill::dsl::*, skill_group::dsl::*};

    // Load the character to delete - ensures that the requesting player
    // owns the character
    let _character_data = character
        .filter(
            schema::character::dsl::character_id
                .eq(char_id)
                .and(player_uuid.eq(requesting_player_uuid)),
        )
        .first::<Character>(&*connection)?;

    // Delete skills
    diesel::delete(skill_group.filter(schema::skill_group::dsl::entity_id.eq(char_id)))
        .execute(&*connection)?;

    diesel::delete(skill.filter(schema::skill::dsl::entity_id.eq(char_id)))
        .execute(&*connection)?;

    // Delete character
    let character_count = diesel::delete(
        character
            .filter(schema::character::dsl::character_id.eq(char_id))
            .filter(player_uuid.eq(requesting_player_uuid)),
    )
    .execute(&*connection)?;

    if character_count != 1 {
        return Err(Error::OtherError(format!(
            "Error deleting from character table for char_id {}",
            char_id
        )));
    }

    // Delete body
    let body_count = diesel::delete(body.filter(schema::body::dsl::body_id.eq(char_id)))
        .execute(&*connection)?;

    if body_count != 1 {
        return Err(Error::OtherError(format!(
            "Error deleting from body table for char_id {}",
            char_id
        )));
    }

    // Delete all items, recursively walking all containers starting from the
    // "character" pseudo-container that is the root for all items owned by
    // a character.
    let item_count = diesel::sql_query(format!(
        "
        WITH RECURSIVE
        parents AS (
            SELECT item_id
            FROM item
            WHERE item.item_id = {} -- Item with character id is the character pseudo-container
            UNION ALL
            SELECT item.item_id
            FROM item,
                 parents
            WHERE item.parent_container_item_id = parents.item_id
        )
        DELETE
        FROM item
        WHERE EXISTS (SELECT 1 FROM parents WHERE parents.item_id = item.item_id)",
        char_id
    ))
    .execute(&*connection)?;

    // At minimum the character, inventory and loadout pseudo-containers must
    // have been removed; fewer deletions indicates a corrupt item tree.
    if item_count < 3 {
        return Err(Error::OtherError(format!(
            "Error deleting from item table for char_id {} (expected at least 3 deletions, found \
             {})",
            char_id, item_count
        )));
    }

    load_character_list(requesting_player_uuid, connection, msm)
}
|
|
|
|
|
2020-06-02 08:16:23 +00:00
|
|
|
/// Before creating a character, we ensure that the limit on the number of
|
|
|
|
/// characters has not been exceeded
|
2020-09-17 23:02:14 +00:00
|
|
|
pub fn check_character_limit(uuid: &str, connection: VelorenTransaction) -> Result<(), Error> {
|
2020-05-09 15:41:25 +00:00
|
|
|
use diesel::dsl::count_star;
|
|
|
|
use schema::character::dsl::*;
|
|
|
|
|
|
|
|
let character_count = character
|
|
|
|
.select(count_star())
|
|
|
|
.filter(player_uuid.eq(uuid))
|
2020-09-17 23:02:14 +00:00
|
|
|
.load::<i64>(&*connection)?;
|
2020-05-09 15:41:25 +00:00
|
|
|
|
|
|
|
match character_count.first() {
|
|
|
|
Some(count) => {
|
|
|
|
if count < &(MAX_CHARACTERS_PER_PLAYER as i64) {
|
|
|
|
Ok(())
|
|
|
|
} else {
|
|
|
|
Err(Error::CharacterLimitReached)
|
|
|
|
}
|
|
|
|
},
|
|
|
|
_ => Ok(()),
|
|
|
|
}
|
|
|
|
}
|
2020-06-01 21:34:52 +00:00
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
/// NOTE: This relies heavily on serializability to work correctly.
///
/// The count function takes the starting entity id, and returns the desired
/// count of new entity IDs.
///
/// These are then inserted into the entities table.
///
/// Returns the half-open range of freshly reserved [`EntityId`]s.
fn get_new_entity_ids(
    conn: VelorenTransaction,
    mut max: impl FnMut(i64) -> i64,
) -> Result<Range<EntityId>, Error> {
    use super::schema::entity::dsl::*;

    // Row shape for the raw sqlite_sequence query below.
    #[derive(QueryableByName)]
    struct NextEntityId {
        #[sql_type = "BigInt"]
        entity_id: i64,
    }

    // The sqlite_sequence table is used here to avoid reusing entity IDs for
    // deleted entities. This table always contains the highest used ID for each
    // AUTOINCREMENT column in a SQLite database.
    let next_entity_id = sql_query(
        "
        SELECT seq + 1 AS entity_id
        FROM sqlite_sequence
        WHERE name = 'entity'",
    )
    .load::<NextEntityId>(&*conn)?
    .pop()
    .ok_or_else(|| Error::OtherError("No rows returned for sqlite_sequence query ".to_string()))?
    .entity_id;

    // The caller-supplied closure decides the exclusive upper bound.
    let max_entity_id = max(next_entity_id);

    // Create a new range of IDs and insert them into the entity table
    let new_ids: Range<EntityId> = next_entity_id..max_entity_id;

    let new_entities: Vec<Entity> = new_ids.clone().map(|x| Entity { entity_id: x }).collect();

    let actual_count = diesel::insert_into(entity)
        .values(&new_entities)
        .execute(&*conn)?;

    if actual_count != new_entities.len() {
        return Err(Error::OtherError(format!(
            "Error updating entity table: expected to add the range {:?}) to entities, but actual \
             insertions={}",
            new_ids, actual_count
        )));
    }

    trace!(
        "Created {} new persistence entity_ids: {}",
        new_ids.end - new_ids.start,
        new_ids
            .clone()
            .map(|x| x.to_string())
            .collect::<Vec<String>>()
            .join(", ")
    );
    Ok(new_ids)
}
|
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
/// Fetches the pseudo_container IDs for a character
|
|
|
|
fn get_pseudo_containers(
|
|
|
|
connection: VelorenTransaction,
|
|
|
|
character_id: CharacterId,
|
|
|
|
) -> Result<CharacterContainers, Error> {
|
|
|
|
let character_containers = CharacterContainers {
|
|
|
|
loadout_container_id: get_pseudo_container_id(
|
|
|
|
connection,
|
|
|
|
character_id,
|
|
|
|
LOADOUT_PSEUDO_CONTAINER_POSITION,
|
|
|
|
)?,
|
|
|
|
inventory_container_id: get_pseudo_container_id(
|
|
|
|
connection,
|
|
|
|
character_id,
|
|
|
|
INVENTORY_PSEUDO_CONTAINER_POSITION,
|
|
|
|
)?,
|
|
|
|
};
|
|
|
|
|
|
|
|
Ok(character_containers)
|
|
|
|
}
|
2020-06-01 21:34:52 +00:00
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
fn get_pseudo_container_id(
|
|
|
|
connection: VelorenTransaction,
|
|
|
|
character_id: CharacterId,
|
|
|
|
pseudo_container_position: &str,
|
|
|
|
) -> Result<EntityId, Error> {
|
|
|
|
use super::schema::item::dsl::*;
|
|
|
|
match item
|
|
|
|
.select(item_id)
|
|
|
|
.filter(
|
|
|
|
parent_container_item_id
|
|
|
|
.eq(character_id)
|
|
|
|
.and(position.eq(pseudo_container_position)),
|
2020-06-01 21:34:52 +00:00
|
|
|
)
|
2020-09-17 23:02:14 +00:00
|
|
|
.first::<EntityId>(&*connection)
|
2020-06-04 11:44:33 +00:00
|
|
|
{
|
2020-09-17 23:02:14 +00:00
|
|
|
Ok(id) => Ok(id),
|
|
|
|
Err(e) => {
|
|
|
|
error!(
|
|
|
|
?e,
|
|
|
|
?character_id,
|
|
|
|
?pseudo_container_position,
|
|
|
|
"Failed to retrieve pseudo container ID"
|
|
|
|
);
|
|
|
|
Err(DatabaseError(e))
|
|
|
|
},
|
2020-06-04 11:44:33 +00:00
|
|
|
}
|
2020-06-01 21:34:52 +00:00
|
|
|
}
|
|
|
|
|
2020-09-17 23:02:14 +00:00
|
|
|
/// Persists a character's current stats, inventory, skills, and waypoint to
/// the database within the current transaction.
///
/// Returns the shared item-ID handles (`Arc<ItemId>`) of every upserted item
/// model so callers can keep component IDs in sync with the database.
pub fn update(
    char_id: CharacterId,
    char_stats: comp::Stats,
    inventory: comp::Inventory,
    char_waypoint: Option<comp::Waypoint>,
    connection: VelorenTransaction,
) -> Result<Vec<Arc<common::comp::item::ItemId>>, Error> {
    use super::schema::{character::dsl::*, item::dsl::*, skill_group::dsl::*};

    let pseudo_containers = get_pseudo_containers(connection, char_id)?;

    let mut upserts = Vec::new();

    // First, get all the entity IDs for any new items, and identify which slots to
    // upsert and which ones to delete.
    get_new_entity_ids(connection, |mut next_id| {
        let upserts_ = convert_items_to_database_items(
            pseudo_containers.loadout_container_id,
            &inventory,
            pseudo_containers.inventory_container_id,
            &mut next_id,
        );
        upserts = upserts_;
        next_id
    })?;

    // Next, delete any slots we aren't upserting.
    trace!("Deleting items for character_id {}", char_id);
    // Candidate parents: the two pseudo-containers plus every item currently
    // stored under them (so nested container contents are also considered).
    let mut existing_item_ids: Vec<i64> = vec![
        pseudo_containers.inventory_container_id,
        pseudo_containers.loadout_container_id,
    ];
    for it in load_items_bfs(connection, pseudo_containers.inventory_container_id)? {
        existing_item_ids.push(it.item_id);
    }
    for it in load_items_bfs(connection, pseudo_containers.loadout_container_id)? {
        existing_item_ids.push(it.item_id);
    }
    let existing_items = parent_container_item_id.eq_any(existing_item_ids);
    // Keep only items that are NOT part of this upsert batch.
    let non_upserted_items = item_id.ne_all(
        upserts
            .iter()
            .map(|item_pair| item_pair.model.item_id)
            .collect::<Vec<_>>(),
    );

    let delete_count = diesel::delete(item.filter(existing_items.and(non_upserted_items)))
        .execute(&*connection)?;
    trace!("Deleted {} items", delete_count);

    // Upsert items
    let expected_upsert_count = upserts.len();
    let mut upserted_comps = Vec::new();
    if expected_upsert_count > 0 {
        let (upserted_items, upserted_comps_): (Vec<_>, Vec<_>) = upserts
            .into_iter()
            .map(|model_pair| {
                // Invariant: the database model's item_id must match the ID
                // stored in the corresponding component handle.
                debug_assert_eq!(
                    model_pair.model.item_id,
                    model_pair.comp.load().unwrap().get() as i64
                );
                (model_pair.model, model_pair.comp)
            })
            .unzip();
        upserted_comps = upserted_comps_;
        trace!(
            "Upserting items {:?} for character_id {}",
            upserted_items,
            char_id
        );

        // When moving inventory items around, foreign key constraints on
        // `parent_container_item_id` can be temporarily violated by one upsert, but
        // restored by another upsert. Deferred constraints allow SQLite to check this
        // when committing the transaction. The `defer_foreign_keys` pragma treats the
        // foreign key constraints as deferred for the next transaction (it turns itself
        // off at the commit boundary). https://sqlite.org/foreignkeys.html#fk_deferred
        connection.execute("PRAGMA defer_foreign_keys = ON;")?;
        let upsert_count = diesel::replace_into(item)
            .values(&upserted_items)
            .execute(&*connection)?;
        trace!("upsert_count: {}", upsert_count);
        if upsert_count != expected_upsert_count {
            return Err(Error::OtherError(format!(
                "Expected upsertions={}, actual={}, for char_id {}--unsafe to continue \
                 transaction.",
                expected_upsert_count, upsert_count, char_id
            )));
        }
    }

    let char_skill_set = char_stats.skill_set;

    let db_skill_groups = convert_skill_groups_to_database(char_id, char_skill_set.skill_groups);

    diesel::replace_into(skill_group)
        .values(&db_skill_groups)
        .execute(&*connection)?;

    let db_skills = convert_skills_to_database(char_id, char_skill_set.skills);

    // Remove any previously persisted skills the character no longer has
    // (e.g. after a skill respec) before re-inserting the current set.
    let delete_count = diesel::delete(
        schema::skill::dsl::skill.filter(
            schema::skill::dsl::entity_id.eq(char_id).and(
                schema::skill::dsl::skill_type.ne_all(
                    db_skills
                        .iter()
                        .map(|x| x.skill_type.clone())
                        .collect::<Vec<_>>(),
                ),
            ),
        ),
    )
    .execute(&*connection)?;
    trace!("Deleted {} skills", delete_count);

    diesel::replace_into(schema::skill::dsl::skill)
        .values(&db_skills)
        .execute(&*connection)?;

    // Store the (possibly absent) waypoint as JSON on the character row.
    let db_waypoint = convert_waypoint_to_database_json(char_waypoint);
    let waypoint_count =
        diesel::update(character.filter(schema::character::dsl::character_id.eq(char_id)))
            .set(waypoint.eq(db_waypoint))
            .execute(&*connection)?;

    if waypoint_count != 1 {
        return Err(Error::OtherError(format!(
            "Error updating character table for char_id {}",
            char_id
        )));
    }

    Ok(upserted_comps)
}
|