Added initial card parsing and client state

2024-05-02 20:05:52 -04:00
parent 8007aca7ff
commit 921f2de61a
5 changed files with 383 additions and 35 deletions

View File

@@ -27,8 +27,11 @@ polars = { version = "0.39.2", default-features = false, features = [
     "lazy",
     "concat_str",
     "strings",
+    "regex",
     "csv",
     "json",
+    "dtype-struct",
+    "serde",
 ] }
 [target.'cfg(engine)'.dev-dependencies]
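The new feature flags line up with the parsing code later in this commit: `regex` backs the `str().contains` filters, `dtype-struct` backs the row-wise `into_struct` iteration, and `json`/`serde` let the raw card JSON become a DataFrame that can live inside a serializable struct. A minimal, hypothetical sketch of that last point (standalone, not code from this repo, assuming `serde_json` is also available):

use polars::prelude::*;
use serde::{Deserialize, Serialize};
use std::io::Cursor;

// The "serde" feature lets a DataFrame sit inside a serde-derived struct,
// which is what the CardTable changes below rely on.
#[derive(Serialize, Deserialize)]
struct MiniTable {
    df: DataFrame,
}

fn main() -> PolarsResult<()> {
    // The "json" feature provides JsonReader for turning raw card JSON into a DataFrame.
    let json = r#"[{"id": 1, "name": "Blue-Eyes White Dragon"}]"#;
    let df = JsonReader::new(Cursor::new(json)).finish()?;
    let table = MiniTable { df };
    println!("{}", serde_json::to_string(&table).unwrap());
    Ok(())
}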

View File

@@ -1,20 +1,83 @@
+use core::panic;
 use once_cell::sync::Lazy;
 use polars::prelude::*;
 use serde::{Deserialize, Serialize};
 use std::io::Cursor;
-use std::{collections::HashMap, hash::Hash};
+use std::{
+    collections::{HashMap, HashSet},
+    hash::Hash,
+};
 #[cfg(engine)]
 use std::fs;
 #[cfg(engine)]
 use std::path::Path;
-enum CartType {
-    NormalMonster,
-    EffectMonster,
-    SpellCard,
-    TrapCard,
-    Unknown { name: String },
-}
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum MonsterCardType {
+    Regular,
+    Normal,
+    Xyz,
+    Ritual,
+    Fusion,
+    Synchro,
+    Link,
+    Token,
+}
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum MonsterAttribute {
+    Dark,
+    Divine,
+    Earth,
+    Fire,
+    Light,
+    Water,
+    Wind,
+}
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum SpellType {
+    Normal,
+    Continuous,
+    Equip,
+    QuickPlay,
+    Field,
+    Ritual,
+}
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum TrapType {
+    Normal,
+    Continuous,
+    Counter,
+}
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum CardTypeInfo {
+    Monster {
+        level: u32, // level/rank/link rating
+        atk: u32,
+        def: Option<u32>,
+        pendulum_scale: Option<u32>,
+        attribute: MonsterAttribute,
+        monster_type: String,
+        monster_card_type: MonsterCardType,
+        tuner: bool,
+        pendulum: bool,
+        // abilities
+        flip: bool,
+        spirit: bool,
+        toon: bool,
+        union: bool,
+        gemini: bool,
+    },
+    Spell {
+        spell_type: SpellType,
+    },
+    Trap {
+        trap_type: TrapType,
+    },
+}
 #[derive(Serialize, Deserialize, Clone, Debug)]
@@ -33,8 +96,10 @@ pub struct ArchetypeInfo {
 pub struct CardInfo {
     pub id: u32,
     pub name: String,
-    pub card_sets: Vec<CardSet>,
+    pub desc: String,
+    pub card_type_info: CardTypeInfo,
     pub archetype: Option<String>,
+    pub card_sets: Vec<CardSet>,
 }
 #[derive(Serialize, Deserialize, Clone, Debug)]
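A hypothetical consumer of the new types (not part of this commit) shows the intent of splitting `CardTypeInfo` into variants: monster-only fields stay inside the `Monster` variant instead of becoming optional fields on every card.

// Hypothetical helper built on the CardInfo / CardTypeInfo types above.
fn is_extra_deck_monster(card: &CardInfo) -> bool {
    match &card.card_type_info {
        CardTypeInfo::Monster { monster_card_type, .. } => matches!(
            monster_card_type,
            MonsterCardType::Fusion
                | MonsterCardType::Synchro
                | MonsterCardType::Xyz
                | MonsterCardType::Link
        ),
        _ => false,
    }
}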
@@ -43,42 +108,327 @@ pub struct CardInstance {
     set_id: String,
 }
+#[derive(Serialize, Deserialize, Clone, Debug)]
 pub struct CardTable {
     pub cards: HashMap<u32, CardInfo>,
     pub sets: HashMap<u32, CardSet>,
     pub archetypes: HashMap<String, ArchetypeInfo>,
+    pub monster_types: HashSet<String>,
     pub df: DataFrame,
 }
 impl CardTable {
+    pub fn df_to_card_info_test(df: DataFrame) -> Vec<CardInfo> {
+        let id_idx = df.get_column_index("id").unwrap();
+        let name_idx = df.get_column_index("name").unwrap();
+        let desc_idx = df.get_column_index("desc").unwrap();
+        let arch_idx = df.get_column_index("archetype").unwrap();
+        let type_idx = df.get_column_index("type").unwrap();
+        let spell_type_idx = df.get_column_index("spell_type").unwrap();
+        let trap_type_idx = df.get_column_index("trap_type").unwrap();
+        let level_idx = df.get_column_index("level").unwrap();
+        let atk_idx = df.get_column_index("atk").unwrap();
+        let def_idx = df.get_column_index("def").unwrap();
+        let pendulum_scale_idx = df.get_column_index("scale").unwrap();
+        let attribute_idx = df.get_column_index("attribute").unwrap();
+        let monster_type_idx = df.get_column_index("monster_type").unwrap();
+        let monster_card_type_idx = df.get_column_index("monster_card_type").unwrap();
+        let tuner_idx = df.get_column_index("tuner").unwrap();
+        let pendulum_idx = df.get_column_index("pendulum").unwrap();
+        let flip_idx = df.get_column_index("flip").unwrap();
+        let spirit_idx = df.get_column_index("spirit").unwrap();
+        let toon_idx = df.get_column_index("toon").unwrap();
+        let union_idx = df.get_column_index("union").unwrap();
+        let gemini_idx = df.get_column_index("gemini").unwrap();
+        let all_cards: Vec<CardInfo> = df
+            .into_struct("Structs")
+            .iter()
+            .map(|row| {
+                let card_type = row[type_idx].get_str().unwrap();
+                let info = CardInfo {
+                    id: row[id_idx].try_extract().unwrap(),
+                    name: row[name_idx].get_str().unwrap().to_string(),
+                    desc: row[desc_idx].get_str().unwrap().to_string(),
+                    card_type_info: if card_type.contains("Spell") {
+                        CardTypeInfo::Spell {
+                            spell_type: match row[spell_type_idx].get_str().unwrap() {
+                                "Continuous" => SpellType::Continuous,
+                                "Quick-Play" => SpellType::QuickPlay,
+                                "Equip" => SpellType::Equip,
+                                "Normal" => SpellType::Normal,
+                                "Field" => SpellType::Field,
+                                "Ritual" => SpellType::Ritual,
+                                unknown => panic!("Unknown spell type {}", unknown),
+                            },
+                        }
+                    } else if card_type.contains("Trap") {
+                        CardTypeInfo::Trap {
+                            trap_type: match row[trap_type_idx].get_str().unwrap() {
+                                "Continuous" => TrapType::Continuous,
+                                "Counter" => TrapType::Counter,
+                                "Normal" => TrapType::Normal,
+                                unknown => panic!("Unknown trap type {}", unknown),
+                            },
+                        }
+                    } else {
+                        CardTypeInfo::Monster {
+                            level: row[level_idx].try_extract().unwrap(),
+                            atk: row[atk_idx].try_extract().unwrap(),
+                            def: row[def_idx].try_extract().ok(),
+                            pendulum_scale: row[pendulum_scale_idx].try_extract().ok(),
+                            attribute: match row[attribute_idx].get_str().unwrap() {
+                                "DARK" => MonsterAttribute::Dark,
+                                "DIVINE" => MonsterAttribute::Divine,
+                                "EARTH" => MonsterAttribute::Earth,
+                                "FIRE" => MonsterAttribute::Fire,
+                                "LIGHT" => MonsterAttribute::Light,
+                                "WATER" => MonsterAttribute::Water,
+                                "WIND" => MonsterAttribute::Wind,
+                                unknown => panic!("Unknown attribute {}", unknown),
+                            },
+                            monster_type: row[monster_type_idx]
+                                .get_str()
+                                .unwrap_or("Token")
+                                .to_string(),
+                            monster_card_type: match row[monster_card_type_idx].get_str().unwrap() {
+                                "Fusion" => MonsterCardType::Fusion,
+                                "Link" => MonsterCardType::Link,
+                                "Normal" => MonsterCardType::Normal,
+                                "Regular" => MonsterCardType::Regular,
+                                "Ritual" => MonsterCardType::Ritual,
+                                "Synchro" => MonsterCardType::Synchro,
+                                "Token" => MonsterCardType::Token,
+                                "XYZ" => MonsterCardType::Xyz,
+                                unknown => panic!("Unknown monster card type {}", unknown),
+                            },
+                            tuner: match row[tuner_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            pendulum: match row[pendulum_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            flip: match row[flip_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            spirit: match row[spirit_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            toon: match row[toon_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            union: match row[union_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                            gemini: match row[gemini_idx] {
+                                AnyValue::Boolean(val) => val,
+                                _ => panic!("Expected bool"),
+                            },
+                        }
+                    },
+                    archetype: match row[arch_idx].get_str() {
+                        Some(arch) => Some(String::from(arch)),
+                        None => None,
+                    },
+                    card_sets: vec![],
+                };
+                info
+            })
+            .collect();
+        all_cards
+    }
     #[cfg(engine)]
     pub fn new_from_server_json(path: &Path) -> Self {
         // First load json into initial dataframe
-        // Override types to be correct
-        // let raw_df_schema = Schema::from_iter(vec![
-        //     Field::new("id", DataType::UInt64),
-        // ]);
         // TODO list all required files
-        let raw_df = JsonReader::new(std::fs::File::open("./data/cardinfo.json").unwrap())
+        // Convert JSON to dataframe
+        use core::arch;
+        use axum::handler::HandlerWithoutStateExt;
+        let df = JsonReader::new(std::fs::File::open(path).unwrap())
             .finish()
             .unwrap();
-        let raw_df = raw_df
+        // Cast to inner "data" dictionary
+        let df = df
             .lazy()
             .select(&[col("data")])
             .explode(vec!["data"])
             .unnest(vec!["data"])
             .collect()
             .unwrap();
-        log::info!("{:?}", &raw_df);
+        // Cast headers dtypes to be correct
+        let df = df
+            .lazy()
+            .with_columns([
+                col("id").cast(DataType::UInt32),
+                col("atk").cast(DataType::UInt32),
+                col("def").cast(DataType::UInt32),
+                col("level").cast(DataType::UInt32),
+                col("linkval").cast(DataType::UInt32),
+                col("scale").cast(DataType::UInt32),
+            ])
+            // Merge linkval and level columns
+            .with_columns([col("level").fill_null(col("linkval"))])
+            // Create separate columns for monster/spell/trap for readability
+            .with_columns([
+                when(col("type").str().contains(lit("Monster"), false))
+                    .then(col("race"))
+                    .otherwise(lit(NULL))
+                    .alias("monster_type"),
+                when(col("type").str().contains(lit("Spell"), false))
+                    .then(col("race"))
+                    .otherwise(lit(NULL))
+                    .alias("spell_type"),
+                when(col("type").str().contains(lit("Trap"), false))
+                    .then(col("race"))
+                    .otherwise(lit(NULL))
+                    .alias("trap_type"),
+            ])
+            // Create separate columns for monster type attributes
+            .with_columns(
+                [when(col("type").str().contains(lit("XYZ.*Monster"), false))
+                    .then(lit("XYZ"))
+                    .when(col("type").str().contains(lit("Ritual.*Monster"), false))
+                    .then(lit("Ritual"))
+                    .when(col("type").str().contains(lit("Fusion.*Monster"), false))
+                    .then(lit("Fusion"))
+                    .when(col("type").str().contains(lit("Synchro.*Monster"), false))
+                    .then(lit("Synchro"))
+                    .when(col("type").str().contains(lit("Link.*Monster"), false))
+                    .then(lit("Link"))
+                    .when(col("type").str().contains(lit("Normal.*Monster"), false))
+                    .then(lit("Normal"))
+                    .when(col("type").str().contains(lit("Token"), false))
+                    .then(lit("Token"))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster"), false))
+                    .then(lit("Regular"))
+                    .otherwise(lit(NULL))
+                    .alias("monster_card_type")],
+            )
+            .with_columns([
+                when(col("type").str().contains(lit("Pendulum.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("pendulum"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Tuner.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("tuner"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Flip.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("flip"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Spirit.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("spirit"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Toon.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("toon"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Union.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("union"),
+            ])
+            .with_columns([
+                when(col("type").str().contains(lit("Gemini.*Monster"), false))
+                    .then(lit(true))
+                    // default monster case
+                    .when(col("type").str().contains(lit("Monster|Token"), false))
+                    .then(lit(false))
+                    .otherwise(lit(NULL))
+                    .alias("gemini"),
+            ])
+            .select([col("*").exclude([
+                "monster_desc",
+                "pend_desc",
+                "frameType",
+                "ygoprodeck_url",
+                "linkval",
+                "race",
+            ])])
+            // Remove link markers, unless it's needed later
+            .select([col("*").exclude(["linkmarkers"])])
+            // TODO add banlist support
+            .select([col("*").exclude(["banlist_info"])])
+            // TODO readd
+            .select([col("*").exclude([
+                "card_sets",
+                "card_images",
+                "card_prices",
+            ])])
+            // Filter out "Skill Card"
+            .filter(col("type").str().contains(lit("Skill Card"), false).not())
+            // Filters for testing
+            // .filter(col("type").str().contains(lit("Monster"), false))
+            // .filter(col("type").str().contains(lit("Token"), false))
+            // Final dataframe
+            .collect()
+            .unwrap();
-        let id_col = UInt32Chunked::new("id_row", &[1]).into_series();
-        let cards = HashMap::new();
+        log::info!("{:?}", &df);
+        let all_cards = Self::df_to_card_info_test(df.clone());
+        let mut cards = HashMap::new();
         let sets = HashMap::new();
         let archetypes = HashMap::new();
-        let df = DataFrame::new(vec![id_col]).unwrap();
+        let monster_types = HashSet::new();
+        for card in all_cards.iter() {
+            cards.insert(card.id, card.clone());
+        }
         Self {
             cards,
             sets,
             archetypes,
+            monster_types,
             df,
         }
     }
@@ -90,12 +440,14 @@ impl CardTable {
         let cards = HashMap::new();
         let sets = HashMap::new();
         let archetypes = HashMap::new();
+        let monster_types = HashSet::new();
        let df = DataFrame::new(vec![id_col]).unwrap();
         Self {
             cards,
             sets,
             archetypes,
+            monster_types,
             df,
         }
     }
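The repeated `when`/`then`/`otherwise` blocks in `new_from_server_json` all follow one shape: true when the ability regex matches, false for any other monster or token, null otherwise. A standalone sketch of that pattern (hypothetical helper, not code from this repo, assuming a polars build with the `lazy`, `strings`, and `regex` features plus default output formatting):

use polars::prelude::*;

// Derive a nullable boolean "ability" column from a regex over the card type.
fn flag_column(frame: DataFrame, pattern: &str, name: &str) -> PolarsResult<DataFrame> {
    frame
        .lazy()
        .with_columns([
            when(col("type").str().contains(lit(pattern), false))
                .then(lit(true))
                .when(col("type").str().contains(lit("Monster|Token"), false))
                .then(lit(false))
                .otherwise(lit(NULL))
                .alias(name),
        ])
        .collect()
}

fn main() -> PolarsResult<()> {
    let df = df!["type" => ["Tuner Effect Monster", "Spell Card", "Normal Monster"]]?;
    // Non-monster rows stay null so the row parser can ignore them.
    let df = flag_column(df, "Tuner.*Monster", "tuner")?;
    println!("{df}");
    Ok(())
}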

View File

@@ -4,30 +4,20 @@ use once_cell::sync::Lazy;
 use serde::{Deserialize, Serialize};
 use std::{fs, path::Path, sync::Mutex};
+use super::card::CardTable;
 #[derive(Serialize, Deserialize, Clone)]
 pub struct Store {
-    // pub matches: PoolMatchList,
+    pub card_table: CardTable,
 }
 impl Store {
     fn new() -> Store {
-        fs::create_dir_all("data").unwrap();
-        match Path::new("data/store.json").exists() {
-            false => Store {
-                // matches: PoolMatchList::new(),
-            },
-            true => {
-                let contents = fs::read_to_string("data/store.json").unwrap();
-                serde_json::from_str(&contents).unwrap()
-            }
-        }
+        // fs::create_dir_all("data").unwrap();
+        let card_table = CardTable::new_from_server_json(Path::new("./data/cardinfo.json"));
+        Store { card_table }
     }
     // TODO -> Store data
-    #[allow(dead_code)]
-    pub fn write(&self) {
-        let contents = serde_json::to_string(&self).unwrap();
-        fs::write("data/store.json", contents).unwrap();
-    }
 }
 pub static DATA: Lazy<Mutex<Store>> = Lazy::new(|| Mutex::new(Store::new()));
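Hypothetical usage of the reworked store (not part of this commit, and assuming it sits in the same module as `DATA`): engine-side code can now read the parsed cards through the global handle, keeping the mutex lock scoped to the lookup.

// Hypothetical helper next to the DATA static defined above.
fn card_name(id: u32) -> Option<String> {
    let store = DATA.lock().unwrap();
    store.card_table.cards.get(&id).map(|card| card.name.clone())
}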

View File

@@ -42,9 +42,6 @@ pub async fn dflt_server<M: MutableStore + 'static, T: TranslationsManager + 'st
     app = register_routes(app);
-    // TODO move to server global
-    CardTable::new_from_server_json(Path::new("./data/cardinfo.json"));
     axum::Server::bind(&addr)
         .serve(app.into_make_service())
         .await

View File

@@ -3,6 +3,8 @@
 use perseus::{prelude::*, state::GlobalStateCreator};
 use serde::{Deserialize, Serialize};
+use crate::data::card::CardTable;
 cfg_if::cfg_if! {
     if #[cfg(engine)] {
         use std::thread;
@@ -14,7 +16,7 @@ cfg_if::cfg_if! {
 #[derive(Serialize, Deserialize, ReactiveState, Clone)]
 #[rx(alias = "AppStateRx")]
 pub struct AppState {
+    pub card_table: CardTable,
 }
 pub fn get_global_state_creator() -> GlobalStateCreator {
@@ -25,7 +27,11 @@ pub fn get_global_state_creator() -> GlobalStateCreator {
 #[engine_only_fn]
 fn get_state() -> AppState {
-    AppState {}
+    let card_table = thread::spawn(move || DATA.lock().unwrap().deref().card_table.clone())
+        .join()
+        .unwrap();
+    AppState { card_table }
 }
 #[engine_only_fn]