mirror of https://gitlab.com/veloren/veloren.git
synced 2024-08-30 18:12:32 +00:00

Changed to using a separate pseudo container in the database for overflow items

This commit is contained in:
parent ccb997cc96
commit 26b1561bca

server/src/migrations/V54__overflow_slots.sql: 32 lines added (new normal file)
@@ -0,0 +1,32 @@
+CREATE TEMP TABLE _temp_character_overflow_items_pairings
+(
+    temp_overflow_items_container_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
+    character_id INT NOT NULL,
+    overflow_items_container_id INT
+);
+
+INSERT
+INTO _temp_character_overflow_items_pairings
+SELECT NULL,
+       i.item_id,
+       NULL
+FROM item i
+WHERE i.item_definition_id = 'veloren.core.pseudo_containers.character';
+
+UPDATE _temp_character_overflow_items_pairings
+SET overflow_items_container_id = ((SELECT MAX(entity_id) FROM entity) + temp_overflow_items_container_id);
+
+INSERT
+INTO entity
+SELECT t.overflow_items_container_id
+FROM _temp_character_overflow_items_pairings t;
+
+INSERT
+INTO item
+SELECT t.overflow_items_container_id,
+       t.character_id,
+       'veloren.core.pseudo_containers.overflow_items',
+       1,
+       'overflow_items',
+       ''
+FROM _temp_character_overflow_items_pairings t;
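The migration gives every existing character a new overflow pseudo-container whose entity id is MAX(entity_id) plus that character's autoincrement row number, so the new ids can never collide with entities that already exist. A minimal Rust sketch of that arithmetic (illustrative only, not part of the commit; the function name is made up):

// Hypothetical sketch mirroring the migration's id arithmetic: each character's
// overflow container gets MAX(entity_id) + row_number, with row_number starting
// at 1 (SQLite AUTOINCREMENT), so the new ids start just past the current maximum.
fn assign_overflow_container_ids(max_entity_id: i64, character_ids: &[i64]) -> Vec<(i64, i64)> {
    character_ids
        .iter()
        .enumerate()
        .map(|(row, &character_id)| (character_id, max_entity_id + row as i64 + 1))
        .collect()
}

fn main() {
    // With entities 1..=100 already present and three characters, the new
    // pseudo-container entities become 101, 102 and 103.
    let pairings = assign_overflow_container_ids(100, &[7, 12, 55]);
    assert_eq!(pairings, vec![(7, 101), (12, 102), (55, 103)]);
}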
@@ -46,14 +46,18 @@ pub(crate) use conversions::convert_waypoint_from_database_json as parse_waypoin
 const CHARACTER_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.character";
 const INVENTORY_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.inventory";
 const LOADOUT_PSEUDO_CONTAINER_DEF_ID: &str = "veloren.core.pseudo_containers.loadout";
+const OVERFLOW_ITEMS_PSEUDO_CONTAINER_DEF_ID: &str =
+    "veloren.core.pseudo_containers.overflow_items";
 const INVENTORY_PSEUDO_CONTAINER_POSITION: &str = "inventory";
 const LOADOUT_PSEUDO_CONTAINER_POSITION: &str = "loadout";
+const OVERFLOW_ITEMS_PSEUDO_CONTAINER_POSITION: &str = "overflow_items";
 const WORLD_PSEUDO_CONTAINER_ID: EntityId = 1;

 #[derive(Clone, Copy)]
 struct CharacterContainers {
     inventory_container_id: EntityId,
     loadout_container_id: EntityId,
+    overflow_items_container_id: EntityId,
 }

 /// Load the inventory/loadout
@@ -126,6 +130,8 @@ pub fn load_character_data(
     let character_containers = get_pseudo_containers(connection, char_id)?;
     let inventory_items = load_items(connection, character_containers.inventory_container_id)?;
     let loadout_items = load_items(connection, character_containers.loadout_container_id)?;
+    let overflow_items_items =
+        load_items(connection, character_containers.overflow_items_container_id)?;

     let mut stmt = connection.prepare_cached(
         "
@@ -276,6 +282,8 @@ pub fn load_character_data(
             &inventory_items,
             character_containers.loadout_container_id,
             &loadout_items,
+            character_containers.overflow_items_container_id,
+            &overflow_items_items,
         )?,
         waypoint: char_waypoint,
         pets,
@@ -383,13 +391,14 @@ pub fn create_character(
         map_marker,
     } = persisted_components;

-    // Fetch new entity IDs for character, inventory and loadout
-    let mut new_entity_ids = get_new_entity_ids(transaction, |next_id| next_id + 3)?;
+    // Fetch new entity IDs for character, inventory, loadout, and overflow items
+    let mut new_entity_ids = get_new_entity_ids(transaction, |next_id| next_id + 4)?;

     // Create pseudo-container items for character
     let character_id = new_entity_ids.next().unwrap();
     let inventory_container_id = new_entity_ids.next().unwrap();
     let loadout_container_id = new_entity_ids.next().unwrap();
+    let overflow_items_container_id = new_entity_ids.next().unwrap();

     let pseudo_containers = vec![
         Item {
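get_new_entity_ids now reserves four ids instead of three, and the order in which .next() is called decides which entity id each pseudo container receives. A rough illustration of that pattern using a plain integer range as a stand-in for the database-backed allocator (names and ids are illustrative, not the real implementation):

// Illustrative only: a contiguous block of reserved entity ids, consumed in a
// fixed order so each pseudo container gets a deterministic id.
fn main() {
    let first_new_id: i64 = 1000; // pretend the allocator handed back ids 1000..1004
    let mut new_entity_ids = first_new_id..first_new_id + 4;

    let character_id = new_entity_ids.next().unwrap();
    let inventory_container_id = new_entity_ids.next().unwrap();
    let loadout_container_id = new_entity_ids.next().unwrap();
    let overflow_items_container_id = new_entity_ids.next().unwrap();

    assert_eq!(
        (character_id, inventory_container_id, loadout_container_id, overflow_items_container_id),
        (1000, 1001, 1002, 1003)
    );
}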
@@ -416,6 +425,14 @@ pub fn create_character(
             position: LOADOUT_PSEUDO_CONTAINER_POSITION.to_owned(),
             properties: String::new(),
         },
+        Item {
+            stack_size: 1,
+            item_id: overflow_items_container_id,
+            parent_container_item_id: character_id,
+            item_definition_id: OVERFLOW_ITEMS_PSEUDO_CONTAINER_DEF_ID.to_owned(),
+            position: OVERFLOW_ITEMS_PSEUDO_CONTAINER_POSITION.to_owned(),
+            properties: String::new(),
+        },
     ];

     let mut stmt = transaction.prepare_cached(
@@ -524,6 +541,7 @@ pub fn create_character(
         loadout_container_id,
         &inventory,
         inventory_container_id,
+        overflow_items_container_id,
         &mut next_id,
     );
     inserts = inserts_;
@@ -807,6 +825,11 @@ fn get_pseudo_containers(
             character_id,
             INVENTORY_PSEUDO_CONTAINER_POSITION,
         )?,
+        overflow_items_container_id: get_pseudo_container_id(
+            connection,
+            character_id,
+            OVERFLOW_ITEMS_PSEUDO_CONTAINER_POSITION,
+        )?,
     };

     Ok(character_containers)
@@ -1001,6 +1024,7 @@ pub fn update(
         pseudo_containers.loadout_container_id,
         &inventory,
         pseudo_containers.inventory_container_id,
+        pseudo_containers.overflow_items_container_id,
         &mut next_id,
     );
     upserts = upserts_;
@@ -1012,6 +1036,7 @@ pub fn update(
     let mut existing_item_ids: Vec<_> = vec![
         Value::from(pseudo_containers.inventory_container_id),
         Value::from(pseudo_containers.loadout_container_id),
+        Value::from(pseudo_containers.overflow_items_container_id),
     ];
     for it in load_items(transaction, pseudo_containers.inventory_container_id)? {
         existing_item_ids.push(Value::from(it.item_id));
@@ -1019,6 +1044,9 @@ pub fn update(
     for it in load_items(transaction, pseudo_containers.loadout_container_id)? {
         existing_item_ids.push(Value::from(it.item_id));
     }
+    for it in load_items(transaction, pseudo_containers.overflow_items_container_id)? {
+        existing_item_ids.push(Value::from(it.item_id));
+    }

     let non_upserted_items = upserts
         .iter()
@@ -56,37 +56,38 @@ pub fn convert_items_to_database_items(
     loadout_container_id: EntityId,
     inventory: &Inventory,
     inventory_container_id: EntityId,
+    overflow_items_container_id: EntityId,
     next_id: &mut i64,
 ) -> Vec<ItemModelPair> {
     let loadout = inventory
         .loadout_items_with_persistence_key()
         .map(|(slot, item)| (slot.to_string(), item, loadout_container_id));

+    let overflow_items = inventory.overflow_items().enumerate().map(|(i, item)| {
+        (
+            serde_json::to_string(&i).expect("failed to serialize index of overflow item"),
+            Some(item),
+            overflow_items_container_id,
+        )
+    });
+
     // Inventory slots.
-    let inventory = inventory
-        .slots_with_id()
-        .map(|(pos, item)| {
-            (
-                serde_json::to_string(&pos).expect("failed to serialize InvSlotId"),
-                item.as_ref(),
-                inventory_container_id,
-            )
-        })
-        .chain(inventory.overflow_items().enumerate().map(|(index, item)| {
-            (
-                format!("overflow_item {index}"),
-                Some(item),
-                inventory_container_id,
-            )
-        }));
+    let inventory = inventory.slots_with_id().map(|(pos, item)| {
+        (
+            serde_json::to_string(&pos).expect("failed to serialize InvSlotId"),
+            item.as_ref(),
+            inventory_container_id,
+        )
+    });

     // Use Breadth-first search to recurse into containers/modular weapons to store
     // their parts
-    let mut bfs_queue: VecDeque<_> = inventory.chain(loadout).collect();
+    let mut bfs_queue: VecDeque<_> = inventory.chain(loadout).chain(overflow_items).collect();
     let mut upserts = Vec::new();
     let mut depth = HashMap::new();
     depth.insert(inventory_container_id, 0);
     depth.insert(loadout_container_id, 0);
+    depth.insert(overflow_items_container_id, 0);
     while let Some((position, item, parent_container_item_id)) = bfs_queue.pop_front() {
         // Construct new items.
         if let Some(item) = item {
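Previously an overflow item was saved under the inventory pseudo container with a position string of the form "overflow_item {index}"; after this commit it lives in its own pseudo container and the position is just the serialized index. A small illustrative comparison of the two keys (this sketch stands in for the serde_json call used above, which serializes a usize to its plain decimal form):

// Illustrative only: how an overflow item's database `position` string changes.
fn old_position(index: usize) -> String {
    // Before this commit: stored under the inventory pseudo container.
    format!("overflow_item {index}")
}

fn new_position(index: usize) -> String {
    // After this commit: stored under the dedicated overflow_items pseudo
    // container; stands in for serde_json::to_string(&index).
    index.to_string()
}

fn main() {
    assert_eq!(old_position(2), "overflow_item 2");
    assert_eq!(new_position(2), "2");
}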
@@ -361,6 +362,8 @@ pub fn convert_inventory_from_database_items(
     inventory_items: &[Item],
     loadout_container_id: i64,
     loadout_items: &[Item],
+    overflow_items_container_id: i64,
+    overflow_items: &[Item],
 ) -> Result<Inventory, PersistenceError> {
     // Loadout items must be loaded before inventory items since loadout items
     // provide inventory slots. Since items stored inside loadout items actually
@@ -369,6 +372,8 @@ pub fn convert_inventory_from_database_items(
     // inventory at the correct position.
     //
     let loadout = convert_loadout_from_database_items(loadout_container_id, loadout_items)?;
+    let overflow_items =
+        convert_overflow_items_from_database_items(overflow_items_container_id, overflow_items)?;
     let mut inventory = Inventory::with_loadout_humanoid(loadout);
     let mut item_indices = HashMap::new();

@@ -423,49 +428,34 @@ pub fn convert_inventory_from_database_items(
         };

         if db_item.parent_container_item_id == inventory_container_id {
-            if db_item.position.contains("overflow_item") {
-                match failed_inserts.insert(db_item.position.clone(), item) {
-                    None => {
-                        // Insert successful
-                    },
-                    Some(_item) => {
-                        // If insert returns a value, database had two items stored with the same
-                        // position which is an error.
-                        return Err(PersistenceError::ConversionError(
-                            "Inserted an item into the same overflow slot twice".to_string(),
-                        ));
-                    },
-                }
-            } else {
-                match slot(&db_item.position) {
-                    Ok(slot) => {
-                        let insert_res = inventory.insert_at(slot, item);
+            match slot(&db_item.position) {
+                Ok(slot) => {
+                    let insert_res = inventory.insert_at(slot, item);

-                        match insert_res {
-                            Ok(None) => {
-                                // Insert successful
-                            },
-                            Ok(Some(_item)) => {
-                                // If inventory.insert returns an item, it means it was swapped for
-                                // an item that already occupied the
-                                // slot. Multiple items being stored
-                                // in the database for the same slot is
-                                // an error.
-                                return Err(PersistenceError::ConversionError(
-                                    "Inserted an item into the same slot twice".to_string(),
-                                ));
-                            },
-                            Err(item) => {
-                                // If this happens there were too many items in the database for the
-                                // current inventory size
-                                failed_inserts.insert(db_item.position.clone(), item);
-                            },
-                        }
-                    },
-                    Err(err) => {
-                        return Err(err);
-                    },
-                }
-            }
+                    match insert_res {
+                        Ok(None) => {
+                            // Insert successful
+                        },
+                        Ok(Some(_item)) => {
+                            // If inventory.insert returns an item, it means it was swapped for
+                            // an item that already occupied the
+                            // slot. Multiple items being stored
+                            // in the database for the same slot is
+                            // an error.
+                            return Err(PersistenceError::ConversionError(
+                                "Inserted an item into the same slot twice".to_string(),
+                            ));
+                        },
+                        Err(item) => {
+                            // If this happens there were too many items in the database for the
+                            // current inventory size
+                            failed_inserts.insert(db_item.position.clone(), item);
+                        },
+                    }
+                },
+                Err(err) => {
+                    return Err(err);
+                },
+            }
         } else if let Some(&j) = item_indices.get(&db_item.parent_container_item_id) {
             get_mutable_item(
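Both the removed branch above and the new conversion function below detect duplicate slots the same way: HashMap::insert returns the previous value when a key is reused, and a Some result is treated as two database rows claiming the same position. A tiny illustration of that idiom (the key and values here are made up):

use std::collections::HashMap;

// Illustrative only: the duplicate-detection idiom the persistence code relies on.
fn main() {
    let mut failed_inserts: HashMap<String, &str> = HashMap::new();

    assert!(failed_inserts.insert("9".to_string(), "first item").is_none()); // fresh key
    assert!(failed_inserts.insert("9".to_string(), "second item").is_some()); // duplicate -> error case
}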
@@ -480,7 +470,6 @@ pub fn convert_inventory_from_database_items(
                         .ok()
                         .and_then(|slot| inv.slot_mut(slot))
                         .and_then(|a| a.as_mut())
-                        // .or_else(f_i.map.get(s).and_then(|i| f_i.items.get_mut(*i)))
                         .or_else(|| f_i.get_mut(s))
                 },
             )?
@@ -493,9 +482,13 @@ pub fn convert_inventory_from_database_items(
         }
     }

-    // For failed inserts, attempt to push to inventory. If push fails, move to
-    // overflow slots.
-    if let Err(inv_error) = inventory.push_all(failed_inserts.into_values()) {
+    // For overflow items and failed inserts, attempt to push to inventory. If push
+    // fails, move to overflow slots.
+    if let Err(inv_error) = inventory.push_all(
+        overflow_items
+            .into_iter()
+            .chain(failed_inserts.into_values()),
+    ) {
         inventory.persistence_push_overflow_items(inv_error.returned_items());
     }

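On load, previously overflowed items are first offered back to the regular inventory together with any failed inserts, and only what still does not fit is kept as overflow. A rough sketch of that "fill what fits, keep the rest" behaviour using plain vectors (hypothetical helper, not the actual Inventory API):

// Hypothetical sketch: place items into free slots; whatever does not fit is
// returned, analogous to inv_error.returned_items() above.
fn push_all(free_slots: usize, items: Vec<&str>) -> (Vec<&str>, Vec<&str>) {
    let mut placed = Vec::new();
    let mut returned = Vec::new();
    for item in items {
        if placed.len() < free_slots {
            placed.push(item);
        } else {
            returned.push(item);
        }
    }
    (placed, returned)
}

fn main() {
    let overflow_items = vec!["sword", "potion"];
    let failed_inserts = vec!["extra shield"];

    // Only two free slots: one item goes back to overflow storage.
    let (placed, overflow) =
        push_all(2, overflow_items.into_iter().chain(failed_inserts).collect());
    assert_eq!(placed, vec!["sword", "potion"]);
    assert_eq!(overflow, vec!["extra shield"]);
}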
@@ -566,6 +559,86 @@ pub fn convert_loadout_from_database_items(
     Ok(loadout)
 }

+pub fn convert_overflow_items_from_database_items(
+    overflow_items_container_id: i64,
+    database_items: &[Item],
+) -> Result<Vec<VelorenItem>, PersistenceError> {
+    let mut overflow_items_with_database_position = HashMap::new();
+    let mut item_indices = HashMap::new();
+
+    // In order for items with components to properly load, it is important that this
+    // item iteration occurs in order so that any modular items are loaded before
+    // their components.
+    for (i, db_item) in database_items.iter().enumerate() {
+        item_indices.insert(db_item.item_id, i);
+
+        let mut item = get_item_from_asset(db_item.item_definition_id.as_str())?;
+        let item_properties =
+            serde_json::de::from_str::<DatabaseItemProperties>(&db_item.properties)?;
+        json_models::apply_db_item_properties(&mut item, &item_properties);
+
+        // NOTE: item id is currently *unique*, so we can store the ID safely.
+        let comp = item.get_item_id_for_database();
+
+        // Item ID
+        comp.store(Some(NonZeroU64::try_from(db_item.item_id as u64).map_err(
+            |_| PersistenceError::ConversionError("Item with zero item_id".to_owned()),
+        )?));
+
+        // Stack Size
+        if db_item.stack_size == 1 || item.is_stackable() {
+            // FIXME: On failure, collect the set of items that don't fit and return them
+            // (to be dropped next to the player) as this could be the result of
+            // a change in the max amount for that item.
+            item.set_amount(u32::try_from(db_item.stack_size).map_err(|_| {
+                PersistenceError::ConversionError(format!(
+                    "Invalid item stack size for stackable={}: {}",
+                    item.is_stackable(),
+                    &db_item.stack_size
+                ))
+            })?)
+            .map_err(|_| {
+                PersistenceError::ConversionError("Error setting amount for item".to_owned())
+            })?;
+        }
+
+        if db_item.parent_container_item_id == overflow_items_container_id {
+            match overflow_items_with_database_position.insert(db_item.position.clone(), item) {
+                None => {
+                    // Insert successful
+                },
+                Some(_item) => {
+                    // If insert returns a value, database had two items stored with the same
+                    // position which is an error.
+                    return Err(PersistenceError::ConversionError(
+                        "Inserted an item into the same overflow slot twice".to_string(),
+                    ));
+                },
+            }
+        } else if let Some(&j) = item_indices.get(&db_item.parent_container_item_id) {
+            get_mutable_item(
+                j,
+                database_items,
+                &item_indices,
+                &mut overflow_items_with_database_position,
+                &|o_i, s| o_i.get_mut(s),
+            )?
+            .persistence_access_add_component(item);
+        } else {
+            return Err(PersistenceError::ConversionError(format!(
+                "Couldn't find parent item {} before item {} in overflow items",
+                db_item.parent_container_item_id, db_item.item_id
+            )));
+        }
+    }
+
+    let overflow_items = overflow_items_with_database_position
+        .into_values()
+        .collect::<Vec<_>>();
+
+    Ok(overflow_items)
+}
+
 fn get_item_from_asset(item_definition_id: &str) -> Result<common::comp::Item, PersistenceError> {
     common::comp::Item::new_from_asset(item_definition_id).map_err(|err| {
         PersistenceError::AssetError(format!(
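The new conversion relies on database rows arriving parent-first: a component row's parent_container_item_id must refer to an item already recorded in item_indices, otherwise it is a ConversionError. A minimal illustration of that ordering check (hypothetical row shape and ids, not the real Item model):

use std::collections::HashMap;

// Hypothetical row: (item_id, parent_id). A parent_id of 0 stands in for
// "parented directly to the overflow pseudo container".
fn check_parent_first(rows: &[(i64, i64)]) -> Result<(), String> {
    let mut item_indices: HashMap<i64, usize> = HashMap::new();
    for (i, &(item_id, parent_id)) in rows.iter().enumerate() {
        item_indices.insert(item_id, i);
        if parent_id != 0 && !item_indices.contains_key(&parent_id) {
            return Err(format!(
                "Couldn't find parent item {parent_id} before item {item_id} in overflow items"
            ));
        }
    }
    Ok(())
}

fn main() {
    // Modular weapon (id 10) appears before its component (id 11): accepted.
    assert!(check_parent_first(&[(10, 0), (11, 10)]).is_ok());
    // Component appears before its parent: rejected, mirroring the error above.
    assert!(check_parent_first(&[(11, 10), (10, 0)]).is_err());
}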