veloren_server/persistence/character/conversions.rs

use crate::persistence::{
    character::EntityId,
    error::PersistenceError,
    json_models::{
        self, CharacterPosition, DatabaseAbilitySet, DatabaseItemProperties, GenericBody,
        HumanoidBody,
    },
    models::{AbilitySets, Character, Item, SkillGroup},
};
use common::{
    character::CharacterId,
    comp::{
        ActiveAbilities, Body as CompBody, Content, Hardcore, Inventory, MapMarker, Stats,
        Waypoint, body,
        inventory::{
            item::{Item as VelorenItem, MaterialStatManifest, tool::AbilityMap},
            loadout::{Loadout, LoadoutError},
            loadout_builder::LoadoutBuilder,
            recipe_book::RecipeBook,
            slot::InvSlotId,
        },
        item,
        skillset::{self, SkillGroupKind, SkillSet, skills::Skill},
    },
    resources::Time,
};
use core::{convert::TryFrom, num::NonZeroU64};
use hashbrown::HashMap;
use lazy_static::lazy_static;
use std::{collections::VecDeque, str::FromStr, sync::Arc};
use tracing::{trace, warn};

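/// Pairs the in-memory item id atomic (shared with the loaded item) with the
/// database row that will be upserted for it.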
#[derive(Debug)]
pub struct ItemModelPair {
    pub comp: Arc<item::ItemId>,
    pub model: Item,
}

// Decoupled from the ECS resource because the plumbing is getting complicated;
// shouldn't matter unless someone's hot-reloading material stats on the live
// server
lazy_static! {
    pub static ref MATERIAL_STATS_MANIFEST: MaterialStatManifest =
        MaterialStatManifest::load().cloned();
    pub static ref ABILITY_MAP: AbilityMap = AbilityMap::load().cloned();
}

/// Returns a vector that contains all item rows to upsert; the caller is
/// responsible for deleting items from the same owner that aren't affirmatively
/// kept by this.
///
/// NOTE: This method does not yet handle persisting nested items within
/// inventories. Although loadout items do store items inside them, this does
/// not currently utilise `parent_container_id` - all loadout items have the
/// loadout pseudo-container as their parent.
pub fn convert_items_to_database_items(
    loadout_container_id: EntityId,
    inventory: &Inventory,
    inventory_container_id: EntityId,
    overflow_items_container_id: EntityId,
    recipe_book_container_id: EntityId,
    next_id: &mut i64,
) -> Vec<ItemModelPair> {
    let loadout = inventory
        .loadout_items_with_persistence_key()
        .map(|(slot, item)| (slot.to_string(), item, loadout_container_id));

    let overflow_items = inventory.overflow_items().enumerate().map(|(i, item)| {
        (
            serde_json::to_string(&i).expect("failed to serialize index of overflow item"),
            Some(item),
            overflow_items_container_id,
        )
    });

    let recipe_book = inventory
        .persistence_recipes_iter_with_index()
        .map(|(i, item)| {
            (
                serde_json::to_string(&i)
                    .expect("failed to serialize index of recipe from recipe book"),
                Some(item),
                recipe_book_container_id,
            )
        });
    // Inventory slots.
    let inventory = inventory.slots_with_id().map(|(pos, item)| {
        (
            serde_json::to_string(&pos).expect("failed to serialize InvSlotId"),
            item.as_ref(),
            inventory_container_id,
        )
    });

    // Use breadth-first search to recurse into containers/modular weapons to store
    // their parts
    let mut bfs_queue: VecDeque<_> = inventory
        .chain(loadout)
        .chain(overflow_items)
        .chain(recipe_book)
        .collect();
    let mut upserts = Vec::new();
    let mut depth = HashMap::new();
    depth.insert(inventory_container_id, 0);
    depth.insert(loadout_container_id, 0);
    depth.insert(overflow_items_container_id, 0);
    depth.insert(recipe_book_container_id, 0);
    while let Some((position, item, parent_container_item_id)) = bfs_queue.pop_front() {
        // Construct new items.
        if let Some(item) = item {
            // Try using the next available id in the sequence as the default for new items.
            let new_item_id = NonZeroU64::new(u64::try_from(*next_id).expect(
                "We are willing to crash if the next entity id overflows (or is otherwise \
                 negative).",
            ))
            .expect("next_id should not be zero, either");

            // Fast (kinda) path: acquire read for the common case where an id has
            // already been assigned.
            let comp = item.get_item_id_for_database();
            let item_id = comp.load()
                // First, we filter out "impossible" entity IDs--IDs that are larger
                // than the maximum sequence value (next_id).  This is important
                // because we update the item ID atomically, *before* we know whether
                // this transaction has completed successfully, and we don't abort the
                // process on a failed transaction.  In such cases, new IDs from
                // aborted transactions will show up as having a higher value than the
                // current max sequence number.  Because the only place that modifies
                // the item_id through a shared reference is (supposed to be) this
                // function, which is part of the batch update transaction, we can
                // assume that any rollback during the update would fail to insert
                // *any* new items for the current character; this means that any items
                // inserted between the failure and now (i.e. values less than next_id)
                // would either not be items at all, or items belonging to other
                // characters, leading to an easily detectable SQLite failure that we
                // can use to atomically set the id back to None (if it was still the
                // same bad value).
                //
                // Note that this logic only requires that all the character's items be
                // updated within the same serializable transaction; the argument does
                // not depend on SQLite-specific details (like locking) or on the fact
                // that a user's transactions are always serialized on their own
                // session.  Also note that since these IDs are in-memory, we don't
                // have to worry about their values during, e.g., a process crash;
                // serializability will take care of us in those cases.  Finally, note
                // that while we have not yet implemented the "liveness" part of the
                // algorithm (resetting ids back to None if we detect errors), this is
                // not needed for soundness, and this part can be deferred until we
                // switch to an execution model where such races are actually possible
                // during normal gameplay.
                .and_then(|item_id| Some(if item_id >= new_item_id {
                    // Try to atomically exchange with our own, "correct" next id.
                    match comp.compare_exchange(Some(item_id), Some(new_item_id)) {
                        Ok(_) => {
                            let item_id = *next_id;
                            // We won the race, use next_id and increment it.
                            *next_id += 1;
                            item_id
                        },
                        Err(item_id) => {
                            // We raced with someone, and they won the race, so we know
                            // this transaction must abort unless they finish first.  So,
                            // just assume they will finish first, and use their assigned
                            // item_id.
                            EntityId::try_from(item_id?.get())
                                .expect("We always choose legal EntityIds as item ids")
                        },
                    }
                } else { EntityId::try_from(item_id.get()).expect("We always choose legal EntityIds as item ids") }))
                // Finally, we're in the case where no entity was assigned yet (either
                // ever, or due to corrections after a rollback).  This proceeds
                // identically to the "impossible ID" case.
                .unwrap_or_else(|| {
                    // Try to atomically compare with the empty id.
                    match comp.compare_exchange(None, Some(new_item_id)) {
                        Ok(_) => {
                            let item_id = *next_id;
                            *next_id += 1;
                            item_id
                        },
                        Err(item_id) => {
                            EntityId::try_from(item_id.expect("TODO: Fix handling of reset to None when we have concurrent writers.").get())
                                .expect("We always choose legal EntityIds as item ids")
                        },
                    }
                });

            depth.insert(item_id, depth[&parent_container_item_id] + 1);

            for (i, component) in item.components().iter().enumerate() {
                // recursive items' children have the same position as their parents, and since
                // they occur afterwards in the topological sort of the parent graph (which
                // should still always be a tree, even with recursive items), we
                // have enough information to put them back into their parents on load
                bfs_queue.push_back((format!("component_{}", i), Some(component), item_id));
            }

            let item_properties = json_models::item_properties_to_db_model(item);

            let upsert = ItemModelPair {
                model: Item {
                    item_definition_id: item.persistence_item_id(),
                    position,
                    parent_container_item_id,
                    item_id,
                    stack_size: if item.is_stackable() {
                        item.amount().into()
                    } else {
                        1
                    },
                    properties: serde_json::to_string(&item_properties)
                        .expect("Failed to convert item properties to a json string."),
                },
                // Continue to remember the atomic, in case we detect an error later and want
                // to roll back to preserve liveness.
                comp,
            };
            upserts.push(upsert);
        }
    }
    upserts.sort_by_key(|pair| (depth[&pair.model.item_id], pair.model.item_id));
    trace!("upserts: {:#?}", upserts);
    upserts
}

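/// Serialises a body component into its `(variant, json)` representation for
/// storage; body variants without persistence support yield a
/// `ConversionError`.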
pub fn convert_body_to_database_json(
    comp_body: &CompBody,
) -> Result<(&str, String), PersistenceError> {
    Ok(match comp_body {
        CompBody::Humanoid(body) => (
            "humanoid",
            serde_json::to_string(&HumanoidBody::from(body))?,
        ),
        CompBody::QuadrupedLow(body) => (
            "quadruped_low",
            serde_json::to_string(&GenericBody::from(body))?,
        ),
        CompBody::QuadrupedMedium(body) => (
            "quadruped_medium",
            serde_json::to_string(&GenericBody::from(body))?,
        ),
        CompBody::QuadrupedSmall(body) => (
            "quadruped_small",
            serde_json::to_string(&GenericBody::from(body))?,
        ),
        CompBody::BirdMedium(body) => (
            "bird_medium",
            serde_json::to_string(&GenericBody::from(body))?,
        ),
        CompBody::Crustacean(body) => (
            "crustacean",
            serde_json::to_string(&GenericBody::from(body))?,
        ),
        _ => {
            return Err(PersistenceError::ConversionError(format!(
                "Unsupported body type for persistence: {:?}",
                comp_body
            )));
        },
    })
}

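/// Encodes the waypoint and map marker (if any) as a single `CharacterPosition`
/// JSON string; returns `None` when neither is present.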
pub fn convert_waypoint_to_database_json(
    waypoint: Option<Waypoint>,
    map_marker: Option<MapMarker>,
) -> Option<String> {
    if waypoint.is_some() || map_marker.is_some() {
        let charpos = CharacterPosition {
            waypoint: waypoint.map(|w| w.get_pos()),
            map_marker: map_marker.map(|m| m.0),
        };
        Some(
            serde_json::to_string(&charpos)
                .map_err(|err| {
                    PersistenceError::ConversionError(format!("Error encoding waypoint: {:?}", err))
                })
                .ok()?,
        )
    } else {
        None
    }
}

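/// Decodes a `CharacterPosition` JSON string back into optional waypoint and
/// map marker components.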
pub fn convert_waypoint_from_database_json(
    position: &str,
) -> Result<(Option<Waypoint>, Option<MapMarker>), PersistenceError> {
    let character_position =
        serde_json::de::from_str::<CharacterPosition>(position).map_err(|err| {
            PersistenceError::ConversionError(format!(
                "Error de-serializing waypoint: {} err: {}",
                position, err
            ))
        })?;
    Ok((
        character_position
            .waypoint
            .map(|pos| Waypoint::new(pos, Time(0.0))),
        character_position.map_marker.map(MapMarker),
    ))
}

// Used to handle cases of modular items that are composed of components.
// When called with the index of a component's parent item, it can get a mutable
// reference to that parent item so that the component can be added to the
// parent item. If the item corresponding to the index this is called on is
// itself a component, it recurses through the inventory until it reaches the
// component.
fn get_mutable_item<'a, 'b, T>(
    index: usize,
    inventory_items: &'a [Item],
    item_indices: &'a HashMap<i64, usize>,
    inventory: &'b mut T,
    get_mut_item: &'a impl Fn(&'b mut T, &str) -> Option<&'b mut VelorenItem>,
) -> Result<&'a mut VelorenItem, PersistenceError>
where
    'b: 'a,
{
    // First checks whether the item is a component; if it is, tries to get a
    // mutable reference to it by first getting a mutable reference to its
    // parent item.
    //
    // It is safe to directly index into `inventory_items` with `index` as the
    // parent item of a component is loaded before its components, therefore the
    // index of a parent item should exist when loading the component.
    let parent_id = inventory_items[index].parent_container_item_id;
    if inventory_items[index].position.contains("component_") {
        if let Some(parent) = item_indices.get(&parent_id).map(move |i| {
            get_mutable_item(
                *i,
                inventory_items,
                item_indices,
                inventory,
                // slot,
                get_mut_item,
            )
        }) {
            // Parses component index
            let position = &inventory_items[index].position;
            let component_index = position
                .split('_')
                .nth(1)
                .and_then(|s| s.parse::<usize>().ok())
                .ok_or_else(|| {
                    PersistenceError::ConversionError(format!(
                        "Failed to parse position stored in database: {position}."
                    ))
                })?;
            // Returns a mutable reference to the component by accessing it through its
            // parent item
            parent?
                .persistence_access_mutable_component(component_index)
                .ok_or_else(|| {
                    PersistenceError::ConversionError(format!(
                        "Component in position {component_index} doesn't exist on parent item \
                         {parent_id}."
                    ))
                })
        } else {
            Err(PersistenceError::ConversionError(format!(
                "Parent item with id {parent_id} does not exist in database."
            )))
        }
    } else {
        get_mut_item(inventory, &inventory_items[index].position).ok_or_else(|| {
            PersistenceError::ConversionError(format!(
                "Unable to retrieve parent veloren item {parent_id} of component from inventory."
            ))
        })
    }
}

/// Properly-recursive items (currently modular weapons) occupy the same
/// inventory slot as their parent. The caller is responsible for ensuring that
/// inventory_items and loadout_items are topologically sorted (i.e. forall i,
/// `items[i].parent_container_item_id == x` implies exists j < i satisfying
/// `items[j].item_id == x`)
pub fn convert_inventory_from_database_items(
    inventory_container_id: i64,
    inventory_items: &[Item],
    loadout_container_id: i64,
    loadout_items: &[Item],
    overflow_items_container_id: i64,
    overflow_items: &[Item],
    recipe_book_items: &[Item],
) -> Result<Inventory, PersistenceError> {
    // Loadout items must be loaded before inventory items since loadout items
    // provide inventory slots. Since items stored inside loadout items actually
    // have their parent_container_item_id as the loadout pseudo-container we rely
    // on populating the loadout items first, and then inserting the items into the
    // inventory at the correct position.
    //
    let loadout = convert_loadout_from_database_items(loadout_container_id, loadout_items)?;
    let overflow_items =
        convert_overflow_items_from_database_items(overflow_items_container_id, overflow_items)?;
    let recipe_book = convert_recipe_book_from_database_items(recipe_book_items)?;
    let mut inventory = Inventory::with_loadout_humanoid(loadout).with_recipe_book(recipe_book);
    let mut item_indices = HashMap::new();

    let mut failed_inserts = HashMap::new();

    // In order for items with components to load properly, it is important that
    // this item iteration occurs in order so that any modular items are loaded
    // before their components.
    for (i, db_item) in inventory_items.iter().enumerate() {
        item_indices.insert(db_item.item_id, i);

        let mut item = get_item_from_asset(db_item.item_definition_id.as_str())?;
        let item_properties =
            serde_json::de::from_str::<DatabaseItemProperties>(&db_item.properties)?;
        json_models::apply_db_item_properties(&mut item, &item_properties);

        // NOTE: Since this is freshly loaded, the atomic is *unique.*
        let comp = item.get_item_id_for_database();

        // Item ID
        comp.store(Some(NonZeroU64::try_from(db_item.item_id as u64).map_err(
            |_| PersistenceError::ConversionError("Item with zero item_id".to_owned()),
        )?));

        // Stack Size
        if db_item.stack_size == 1 || item.is_stackable() {
            // FIXME: On failure, collect the set of items that don't fit and return them
            // (to be dropped next to the player) as this could be the result of
            // a change in the max amount for that item.
            item.set_amount(u32::try_from(db_item.stack_size).map_err(|_| {
                PersistenceError::ConversionError(format!(
                    "Invalid item stack size for stackable={}: {}",
                    item.is_stackable(),
                    &db_item.stack_size
                ))
            })?)
            .map_err(|_| {
                PersistenceError::ConversionError("Error setting amount for item".to_owned())
            })?;
        }

        // Insert item into inventory

        // Slot position
        let slot = |s: &str| {
            serde_json::from_str::<InvSlotId>(s).map_err(|_| {
                PersistenceError::ConversionError(format!(
                    "Failed to parse item position: {:?}",
                    &db_item.position
                ))
            })
        };

        if db_item.parent_container_item_id == inventory_container_id {
            match slot(&db_item.position) {
                Ok(slot) => {
                    let insert_res = inventory.insert_at(slot, item);

                    match insert_res {
                        Ok(None) => {
                            // Insert successful
                        },
                        Ok(Some(_item)) => {
                            // If inventory.insert returns an item, it means it was swapped for
                            // an item that already occupied the slot. Multiple items being
                            // stored in the database for the same slot is an error.
                            return Err(PersistenceError::ConversionError(
                                "Inserted an item into the same slot twice".to_string(),
                            ));
                        },
                        Err(item) => {
                            // If this happens there were too many items in the database for the
                            // current inventory size
                            failed_inserts.insert(db_item.position.clone(), item);
                        },
                    }
                },
                Err(err) => {
                    return Err(err);
                },
            }
        } else if let Some(&j) = item_indices.get(&db_item.parent_container_item_id) {
            get_mutable_item(
                j,
                inventory_items,
                &item_indices,
                &mut (&mut inventory, &mut failed_inserts),
                &|(inv, f_i): &mut (&mut Inventory, &mut HashMap<String, VelorenItem>), s| {
                    // First attempts to access the slot in the inventory if it exists there; if
                    // it does not, falls back to the failed inserts list.
                    slot(s)
                        .ok()
                        .and_then(|slot| inv.slot_mut(slot))
                        .and_then(|a| a.as_mut())
                        .or_else(|| f_i.get_mut(s))
                },
            )?
            .persistence_access_add_component(item);
        } else {
            return Err(PersistenceError::ConversionError(format!(
                "Couldn't find parent item {} before item {} in inventory",
                db_item.parent_container_item_id, db_item.item_id
            )));
        }
    }

    // For overflow items and failed inserts, attempt to push to inventory. If push
    // fails, move to overflow slots.
    if let Err(inv_error) = inventory.push_all(
        overflow_items
            .into_iter()
            .chain(failed_inserts.into_values()),
    ) {
        inventory.persistence_push_overflow_items(inv_error.returned_items());
    }

    // Some items may have had components added, so update the item config of each
    // item to ensure that it correctly accounts for components that were added
    inventory.persistence_update_all_item_states(&ABILITY_MAP, &MATERIAL_STATS_MANIFEST);

    Ok(inventory)
}

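/// Rebuilds a `Loadout` from its database rows, re-attaching any modular item
/// components to their parent items.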
pub fn convert_loadout_from_database_items(
    loadout_container_id: i64,
    database_items: &[Item],
) -> Result<Loadout, PersistenceError> {
    let loadout_builder = LoadoutBuilder::empty();
    let mut loadout = loadout_builder.build();
    let mut item_indices = HashMap::new();

    // In order for items with components to load properly, it is important that
    // this item iteration occurs in order so that any modular items are loaded
    // before their components.
    for (i, db_item) in database_items.iter().enumerate() {
        item_indices.insert(db_item.item_id, i);

        let mut item = get_item_from_asset(db_item.item_definition_id.as_str())?;
        let item_properties =
            serde_json::de::from_str::<DatabaseItemProperties>(&db_item.properties)?;
        json_models::apply_db_item_properties(&mut item, &item_properties);

        // NOTE: item id is currently *unique*, so we can store the ID safely.
        let comp = item.get_item_id_for_database();
        comp.store(Some(NonZeroU64::try_from(db_item.item_id as u64).map_err(
            |_| PersistenceError::ConversionError("Item with zero item_id".to_owned()),
        )?));

        let convert_error = |err| match err {
            LoadoutError::InvalidPersistenceKey => PersistenceError::ConversionError(format!(
                "Invalid persistence key: {}",
                &db_item.position
            )),
            LoadoutError::NoParentAtSlot => PersistenceError::ConversionError(format!(
                "No parent item at slot: {}",
                &db_item.position
            )),
        };

        if db_item.parent_container_item_id == loadout_container_id {
            loadout
                .set_item_at_slot_using_persistence_key(&db_item.position, item)
                .map_err(convert_error)?;
        } else if let Some(&j) = item_indices.get(&db_item.parent_container_item_id) {
            get_mutable_item(j, database_items, &item_indices, &mut loadout, &|l, s| {
                l.get_mut_item_at_slot_using_persistence_key(s).ok()
            })?
            .persistence_access_add_component(item);
        } else {
            return Err(PersistenceError::ConversionError(format!(
                "Couldn't find parent item {} before item {} in loadout",
                db_item.parent_container_item_id, db_item.item_id
            )));
        }
    }

    // Some items may have had components added, so update the item config of each
    // item to ensure that it correctly accounts for components that were added
    loadout.persistence_update_all_item_states(&ABILITY_MAP, &MATERIAL_STATS_MANIFEST);

    Ok(loadout)
}

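/// Rebuilds the list of overflow items from their database rows, re-attaching
/// any modular item components to their parent items.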
pub fn convert_overflow_items_from_database_items(
    overflow_items_container_id: i64,
    database_items: &[Item],
) -> Result<Vec<VelorenItem>, PersistenceError> {
    let mut overflow_items_with_database_position = HashMap::new();
    let mut item_indices = HashMap::new();

    // In order for items with components to load properly, it is important that
    // this item iteration occurs in order so that any modular items are loaded
    // before their components.
    for (i, db_item) in database_items.iter().enumerate() {
        item_indices.insert(db_item.item_id, i);

        let mut item = get_item_from_asset(db_item.item_definition_id.as_str())?;
        let item_properties =
            serde_json::de::from_str::<DatabaseItemProperties>(&db_item.properties)?;
        json_models::apply_db_item_properties(&mut item, &item_properties);

        // NOTE: item id is currently *unique*, so we can store the ID safely.
        let comp = item.get_item_id_for_database();

        // Item ID
        comp.store(Some(NonZeroU64::try_from(db_item.item_id as u64).map_err(
            |_| PersistenceError::ConversionError("Item with zero item_id".to_owned()),
        )?));

        // Stack Size
        if db_item.stack_size == 1 || item.is_stackable() {
            // FIXME: On failure, collect the set of items that don't fit and return them
            // (to be dropped next to the player) as this could be the result of
            // a change in the max amount for that item.
            item.set_amount(u32::try_from(db_item.stack_size).map_err(|_| {
                PersistenceError::ConversionError(format!(
                    "Invalid item stack size for stackable={}: {}",
                    item.is_stackable(),
                    &db_item.stack_size
                ))
            })?)
            .map_err(|_| {
                PersistenceError::ConversionError("Error setting amount for item".to_owned())
            })?;
        }

        if db_item.parent_container_item_id == overflow_items_container_id {
            match overflow_items_with_database_position.insert(db_item.position.clone(), item) {
                None => {
                    // Insert successful
                },
                Some(_item) => {
                    // If insert returns a value, the database had two items stored with the
                    // same position, which is an error.
                    return Err(PersistenceError::ConversionError(
                        "Inserted an item into the same overflow slot twice".to_string(),
                    ));
                },
            }
        } else if let Some(&j) = item_indices.get(&db_item.parent_container_item_id) {
            get_mutable_item(
                j,
                database_items,
                &item_indices,
                &mut overflow_items_with_database_position,
                &|o_i, s| o_i.get_mut(s),
            )?
            .persistence_access_add_component(item);
        } else {
            return Err(PersistenceError::ConversionError(format!(
                "Couldn't find parent item {} before item {} in overflow items",
                db_item.parent_container_item_id, db_item.item_id
            )));
        }
    }

    let overflow_items = overflow_items_with_database_position
        .into_values()
        .collect::<Vec<_>>();

    Ok(overflow_items)
}

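/// Loads an item from its asset definition id, mapping load failures to
/// `PersistenceError::AssetError`.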
fn get_item_from_asset(item_definition_id: &str) -> Result<common::comp::Item, PersistenceError> {
    common::comp::Item::new_from_asset(item_definition_id).map_err(|err| {
        PersistenceError::AssetError(format!(
            "Error loading item asset: {} - {}",
            item_definition_id, err
        ))
    })
}

/// Generates the code to deserialize a specific body variant from JSON
macro_rules! deserialize_body {
    ($body_data:expr, $body_variant:tt, $body_type:tt) => {{
        let json_model = serde_json::de::from_str::<GenericBody>($body_data)?;
        CompBody::$body_variant(common::comp::$body_type::Body {
            species: common::comp::$body_type::Species::from_str(&json_model.species)
                .map_err(|_| {
                    PersistenceError::ConversionError(format!(
                        "Missing species: {}",
                        json_model.species
                    ))
                })?
                .to_owned(),
            body_type: common::comp::$body_type::BodyType::from_str(&json_model.body_type)
                .map_err(|_| {
                    PersistenceError::ConversionError(format!(
                        "Missing body type: {}",
                        json_model.body_type
                    ))
                })?
                .to_owned(),
        })
    }};
}
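/// Deserialises a body from its database `(variant, json)` representation; the
/// inverse of [`convert_body_to_database_json`].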
pub fn convert_body_from_database(
    variant: &str,
    body_data: &str,
) -> Result<CompBody, PersistenceError> {
    Ok(match variant {
        // The humanoid variant doesn't use the deserialize_body! macro as it is unique in having
        // extra fields on its body struct
        "humanoid" => {
            let json_model = serde_json::de::from_str::<HumanoidBody>(body_data)?;
            CompBody::Humanoid(body::humanoid::Body {
                species: body::humanoid::ALL_SPECIES
                    .get(json_model.species as usize)
                    .ok_or_else(|| {
                        PersistenceError::ConversionError(format!(
                            "Missing species: {}",
                            json_model.species
                        ))
                    })?
                    .to_owned(),
                body_type: body::humanoid::ALL_BODY_TYPES
                    .get(json_model.body_type as usize)
                    .ok_or_else(|| {
                        PersistenceError::ConversionError(format!(
                            "Missing body_type: {}",
                            json_model.body_type
                        ))
                    })?
                    .to_owned(),
                hair_style: json_model.hair_style,
                beard: json_model.beard,
                eyes: json_model.eyes,
                accessory: json_model.accessory,
                hair_color: json_model.hair_color,
                skin: json_model.skin,
                eye_color: json_model.eye_color,
            })
        },
        "quadruped_low" => {
            deserialize_body!(body_data, QuadrupedLow, quadruped_low)
        },
        "quadruped_medium" => {
            deserialize_body!(body_data, QuadrupedMedium, quadruped_medium)
        },
        "quadruped_small" => {
            deserialize_body!(body_data, QuadrupedSmall, quadruped_small)
        },
        "bird_medium" => {
            deserialize_body!(body_data, BirdMedium, bird_medium)
        },
        "crustacean" => {
            deserialize_body!(body_data, Crustacean, crustacean)
        },
        _ => {
            return Err(PersistenceError::ConversionError(format!(
                "{} is not a supported body type for deserialization",
                variant
            )));
        },
    })
}

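/// Converts a character database model into the common character
/// representation used by the rest of the game.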
pub fn convert_character_from_database(character: &Character) -> common::character::Character {
    common::character::Character {
        id: Some(CharacterId(character.character_id)),
        alias: String::from(&character.alias),
    }
}

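/// Builds a fresh `Stats` component for the given body, using the character's
/// alias as its name.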
pub fn convert_stats_from_database(alias: String, body: CompBody) -> Stats {
    let mut new_stats = Stats::empty(body);
    new_stats.name = Content::Plain(alias);
    new_stats
}

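/// Interprets the hardcore database flag (0 or 1); any other value is a
/// conversion error.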
pub fn convert_hardcore_from_database(hardcore: i64) -> Result<Option<Hardcore>, PersistenceError> {
    match hardcore {
        0 => Ok(None),
        1 => Ok(Some(common::comp::Hardcore)),
        _ => Err(PersistenceError::ConversionError(format!(
            "Invalid hardcore field: {hardcore}"
        ))),
    }
}

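/// Stores the presence of the `Hardcore` component as a 0/1 database flag.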
pub fn convert_hardcore_to_database(hardcore: Option<Hardcore>) -> i64 {
    if hardcore.is_some() { 1 } else { 0 }
}

/// NOTE: This does *not* return an error on failure, since we can partially
/// recover from some failures.  Instead, it returns the error in the second
/// return value; make sure to handle it if present!
pub fn convert_skill_set_from_database(
    skill_groups: &[SkillGroup],
) -> (SkillSet, Option<skillset::SkillsPersistenceError>) {
    let (skillless_skill_groups, deserialized_skills) =
        convert_skill_groups_from_database(skill_groups);
    SkillSet::load_from_database(skillless_skill_groups, deserialized_skills)
}

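/// Reconstructs skill groups from their database rows, recomputing experience
/// and skill points and validating the persisted skills; any mismatch is
/// reported as a `SkillsPersistenceError` so the skill group can be respecced.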
fn convert_skill_groups_from_database(
    skill_groups: &[SkillGroup],
) -> (
    // Skill groups in the map do not contain skills; those are added later. The skill group only
    // contains fields related to experience and skill points
    HashMap<SkillGroupKind, skillset::SkillGroup>,
    //
    HashMap<SkillGroupKind, Result<Vec<Skill>, skillset::SkillsPersistenceError>>,
) {
    let mut new_skill_groups = HashMap::new();
    let mut deserialized_skills = HashMap::new();
    for skill_group in skill_groups.iter() {
        let skill_group_kind = json_models::db_string_to_skill_group(&skill_group.skill_group_kind);
        let mut new_skill_group = skillset::SkillGroup {
            skill_group_kind,
            // Available and earned exp and sp are reconstructed below
            earned_exp: 0,
            available_exp: 0,
            available_sp: 0,
            earned_sp: 0,
            // Ordered skills are empty here since skills are inserted later as they are
            // unlocked, so long as there is not a respec.
            ordered_skills: Vec::new(),
        };

        // Add experience to the skill group through the method to ensure the invariant
        // (earned_exp >= available_exp) is maintained.
        // Adding experience will automatically earn all possible skill points.
        let skill_group_exp = skill_group.earned_exp.clamp(0, i64::from(u32::MAX)) as u32;
        new_skill_group.add_experience(skill_group_exp);

        use skillset::SkillsPersistenceError;

        let skills_result = if skill_group.spent_exp != i64::from(new_skill_group.spent_exp()) {
            // If persisted spent exp does not equal the spent exp after reacquiring skill
            // points, force a respec
            Err(SkillsPersistenceError::SpentExpMismatch)
        } else if Some(&skill_group.hash_val) != skillset::SKILL_GROUP_HASHES.get(&skill_group_kind)
        {
            // Else if persisted hash for skill group does not match current hash for skill
            // group, force a respec
            Err(SkillsPersistenceError::HashMismatch)
        } else {
            // Else attempt to deserialize skills from a json string
            match serde_json::from_str::<Vec<Skill>>(&skill_group.skills) {
                // If it correctly deserializes, return the persisted skills
                Ok(skills) => Ok(skills),
                // Else if it doesn't deserialize correctly, force a respec
                Err(err) => {
                    warn!(
                        "Skills failed to deserialize correctly\nError: {:#?}\nRaw JSON: {:#?}",
                        err, &skill_group.skills
                    );
                    Err(SkillsPersistenceError::DeserializationFailure)
                },
            }
        };

        deserialized_skills.insert(skill_group_kind, skills_result);

        new_skill_groups.insert(skill_group_kind, new_skill_group);
    }
    (new_skill_groups, deserialized_skills)
}

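/// Converts in-memory skill groups into database rows for the given character,
/// serialising the ordered skills and recording the current skill group hash.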
pub fn convert_skill_groups_to_database<'a, I: Iterator<Item = &'a skillset::SkillGroup>>(
    entity_id: CharacterId,
    skill_groups: I,
) -> Vec<SkillGroup> {
    let skill_group_hashes = &skillset::SKILL_GROUP_HASHES;
    skill_groups
        .into_iter()
        .map(|sg| SkillGroup {
            entity_id: entity_id.0,
            skill_group_kind: json_models::skill_group_to_db_string(sg.skill_group_kind),
            earned_exp: i64::from(sg.earned_exp),
            spent_exp: i64::from(sg.spent_exp()),
            // If this fails to convert, it just forces a respec on next login
            skills: serde_json::to_string(&sg.ordered_skills).unwrap_or_else(|_| "".to_string()),
            hash_val: skill_group_hashes
                .get(&sg.skill_group_kind)
                .cloned()
                .unwrap_or_default(),
        })
        .collect()
}

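/// Serialises a character's active abilities into the `AbilitySets` database
/// model.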
pub fn convert_active_abilities_to_database(
    entity_id: CharacterId,
    active_abilities: &ActiveAbilities,
) -> AbilitySets {
    let ability_sets = json_models::active_abilities_to_db_model(active_abilities);
    AbilitySets {
        entity_id: entity_id.0,
        ability_sets: serde_json::to_string(&ability_sets).unwrap_or_default(),
    }
}

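/// Deserialises a character's active abilities from the `AbilitySets` database
/// model; if parsing fails, it raises a dev panic and falls back to an empty
/// list of ability sets.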
pub fn convert_active_abilities_from_database(ability_sets: &AbilitySets) -> ActiveAbilities {
    let ability_sets = serde_json::from_str::<Vec<DatabaseAbilitySet>>(&ability_sets.ability_sets)
        .unwrap_or_else(|err| {
            common_base::dev_panic!(format!(
                "Failed to parse ability sets. Error: {:#?}\nAbility sets:\n{:#?}",
                err, ability_sets.ability_sets
            ));
            Vec::new()
        });
    json_models::active_abilities_from_db_model(ability_sets)
}

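/// Rebuilds the recipe book from the recipe group items stored in the database.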
pub fn convert_recipe_book_from_database_items(
    database_items: &[Item],
) -> Result<RecipeBook, PersistenceError> {
    let mut recipes_groups = Vec::new();

    for db_item in database_items.iter() {
        let item = get_item_from_asset(db_item.item_definition_id.as_str())?;

        // NOTE: item id is currently *unique*, so we can store the ID safely.
        let comp = item.get_item_id_for_database();
        comp.store(Some(NonZeroU64::try_from(db_item.item_id as u64).map_err(
            |_| PersistenceError::ConversionError("Item with zero item_id".to_owned()),
        )?));

        recipes_groups.push(item);
    }

    let recipe_book = RecipeBook::recipe_book_from_persistence(recipes_groups);

    Ok(recipe_book)
}