use crate::db_object::{DBObject, OrderTypes};
use crate::df_world::DBDFWorld;
use crate::schema::historical_event_collections;
use crate::DbConnection;
use df_st_core::fillable::{Fillable, Filler};
use df_st_core::item_count::ItemCount;
use df_st_derive::{Fillable, HashAndPartialEqById};
use diesel::expression_methods::ExpressionMethods;
use diesel::prelude::*;
use diesel::query_dsl::RunQueryDsl;
use diesel::Queryable;
use failure::Error;
use std::collections::HashMap;
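// Join-table models for the list fields of a collection (event ids, related
// collection ids, outcomes, attacker/defender/mercenary HF ids) plus the
// remaining per-type columns, which are split across the `_a_c` and `_d_z`
// tables.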
mod hec_a_support_merc_hf_id;
mod hec_attacking_hf_id;
mod hec_d_support_merc_hf_id;
mod hec_defending_hf_id;
mod hec_he_id;
mod hec_individual_merc;
mod hec_noncom_hf_id;
mod hec_outcome;
mod hec_related_id;
mod historical_event_collection_a_c;
mod historical_event_collection_d_z;
pub use hec_a_support_merc_hf_id::HECASupportMercHFID;
pub use hec_attacking_hf_id::HECAttackingHFID;
pub use hec_d_support_merc_hf_id::HECDSupportMercHFID;
pub use hec_defending_hf_id::HECDefendingHFID;
pub use hec_he_id::HECHEID;
pub use hec_individual_merc::HECIndividualMerc;
pub use hec_noncom_hf_id::HECNoncomHFID;
pub use hec_outcome::HECOutcome;
pub use hec_related_id::HECRelatedID;
pub use historical_event_collection_a_c::HistoricalEventCollectionAC;
pub use historical_event_collection_d_z::HistoricalEventCollectionDZ;
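/// Diesel model for one row of the `historical_event_collections` table.
///
/// Only the scalar columns live here; the list fields of
/// `df_st_core::HistoricalEventCollection` are stored in the join tables from
/// the `hec_*` modules, and the remaining per-type columns in
/// `HistoricalEventCollectionAC`/`HistoricalEventCollectionDZ`.
///
/// A minimal construction sketch (values are illustrative only):
///
/// ```ignore
/// let mut hec = HistoricalEventCollection::new();
/// hec.id = 1;
/// hec.world_id = 1;
/// hec.type_ = Some("war".to_string());
/// ```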
#[derive(
Clone,
Debug,
AsChangeset,
Identifiable,
HashAndPartialEqById,
Queryable,
Insertable,
Fillable,
Default,
)]
#[table_name = "historical_event_collections"]
pub struct HistoricalEventCollection {
pub id: i32,
pub world_id: i32,
pub type_: Option<String>,
pub start_year: Option<i32>,
pub start_seconds72: Option<i32>,
pub end_year: Option<i32>,
pub end_seconds72: Option<i32>,
}
impl HistoricalEventCollection {
pub fn new() -> Self {
Self::default()
}
}
impl DBObject<df_st_core::HistoricalEventCollection, HistoricalEventCollection>
for HistoricalEventCollection
{
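// Flatten the Vec fields of every core collection into rows for the
// corresponding join tables (one row per id/value) so they can be bulk
// inserted together with the rest of the world.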
fn add_missing_data_advanced(core_world: &df_st_core::DFWorld, world: &mut DBDFWorld) {
for hec in core_world.historical_event_collections.values() {
for item in &hec.he_ids {
let mut db_item = HECHEID::new();
db_item.hec_id = hec.id;
db_item.he_id = *item;
world.hec_he_ids.push(db_item);
}
for item in &hec.hec_ids {
let mut db_item = HECRelatedID::new();
db_item.hec_id = hec.id;
db_item.rel_hec_id = *item;
world.hec_related_ids.push(db_item);
}
for item in &hec.individual_merc {
let mut db_item = HECIndividualMerc::new();
db_item.hec_id = hec.id;
db_item.individual_merc = *item;
world.hec_individual_mercs.push(db_item);
}
for item in &hec.noncom_hf_id {
let mut db_item = HECNoncomHFID::new();
db_item.hec_id = hec.id;
db_item.noncom_hf_id = *item;
world.hec_noncom_hf_ids.push(db_item);
}
for item in &hec.outcome {
let mut db_item = HECOutcome::new();
db_item.hec_id = hec.id;
db_item.outcome = item.clone();
world.hec_outcomes.push(db_item);
}
for item in &hec.attacking_hf_id {
let mut db_item = HECAttackingHFID::new();
db_item.hec_id = hec.id;
db_item.attacking_hf_id = *item;
world.hec_attacking_hf_ids.push(db_item);
}
for item in &hec.defending_hf_id {
let mut db_item = HECDefendingHFID::new();
db_item.hec_id = hec.id;
db_item.defending_hf_id = *item;
world.hec_defending_hf_ids.push(db_item);
}
for item in &hec.a_support_merc_hf_id {
let mut db_item = HECASupportMercHFID::new();
db_item.hec_id = hec.id;
db_item.a_support_merc_hf_id = *item;
world.hec_a_support_merc_hf_ids.push(db_item);
}
for item in &hec.d_support_merc_hf_id {
let mut db_item = HECDSupportMercHFID::new();
db_item.hec_id = hec.id;
db_item.d_support_merc_hf_id = *item;
world.hec_d_support_merc_hf_ids.push(db_item);
}
}
}
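// PostgreSQL variant: upsert on (id, world_id), overwriting the scalar
// columns of an existing row with the newly provided values.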
#[cfg(feature = "postgres")]
fn insert_into_db(
conn: &DbConnection,
historical_event_collections: &[HistoricalEventCollection],
) {
use diesel::pg::upsert::excluded;
diesel::insert_into(historical_event_collections::table)
.values(historical_event_collections)
.on_conflict((
historical_event_collections::id,
historical_event_collections::world_id,
))
.do_update()
.set((
historical_event_collections::type_
.eq(excluded(historical_event_collections::type_)),
historical_event_collections::start_year
.eq(excluded(historical_event_collections::start_year)),
historical_event_collections::start_seconds72
.eq(excluded(historical_event_collections::start_seconds72)),
historical_event_collections::end_year
.eq(excluded(historical_event_collections::end_year)),
historical_event_collections::end_seconds72
.eq(excluded(historical_event_collections::end_seconds72)),
))
.execute(conn)
.expect("Error saving historical_event_collections");
}
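// Fallback variant (e.g. SQLite): plain insert without conflict handling.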
#[cfg(not(feature = "postgres"))]
fn insert_into_db(
conn: &DbConnection,
historical_event_collections: &[HistoricalEventCollection],
) {
diesel::insert_into(historical_event_collections::table)
.values(historical_event_collections)
.execute(conn)
.expect("Error saving historical_event_collections");
}
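// Look up a single collection by `world_id` and `id`; missing filter keys
// default to 0.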
fn find_db_item(
conn: &DbConnection,
id_filter: HashMap<String, i32>,
) -> Result<Option<HistoricalEventCollection>, Error> {
use crate::schema::historical_event_collections::dsl::*;
let query = historical_event_collections;
let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
let query = query.filter(id.eq(id_filter.get("id").unwrap_or(&0)));
Ok(query.first::<HistoricalEventCollection>(conn).optional()?)
}
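// Return a page of collections for one world, optionally restricted to a
// list of ids, ordered by the requested column.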
fn find_db_list(
conn: &DbConnection,
id_filter: HashMap<String, i32>,
_string_filter: HashMap<String, String>,
offset: i64,
limit: i64,
order: Option<OrderTypes>,
order_by: Option<String>,
id_list: Option<Vec<i32>>,
) -> Result<Vec<HistoricalEventCollection>, Error> {
use crate::schema::historical_event_collections::dsl::*;
let (order_by, asc) = Self::get_order(order, order_by);
let query = historical_event_collections.limit(limit).offset(offset);
let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
optional_filter! {
query, id_filter,
id_list => id,
[
"id" => id,
],
{Ok(order_by!{
order_by, asc, query, conn,
"id" => id,
"type" => type_,
"start_year" => start_year,
"start_seconds72" => start_seconds72,
"end_year" => end_year,
"end_seconds72" => end_seconds72,
})},
}
}
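// Map an external field name onto a sortable column, falling back to "id"
// for anything unknown.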
fn match_field_by(field: String) -> String {
match field.as_ref() {
"type" => "type",
"start_year" => "start_year",
"start_seconds72" => "start_seconds72",
"end_year" => "end_year",
"end_seconds72" => "end_seconds72",
_ => "id",
}
.to_owned()
}
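// Load all child rows (a_c/d_z attributes, event ids, related collection ids,
// mercenaries, outcomes, attacker/defender HF ids) for the given collections
// in one query per table, group them per collection with Diesel's
// `belonging_to`/`grouped_by`, and merge everything into core structs.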
fn add_nested_items(
conn: &DbConnection,
db_list: &[HistoricalEventCollection],
_core_list: Vec<df_st_core::HistoricalEventCollection>,
) -> Result<Vec<df_st_core::HistoricalEventCollection>, Error> {
let world_id = match db_list.first() {
Some(x) => x.world_id,
None => 0,
};
let hec_a_c_list = HistoricalEventCollectionAC::belonging_to(db_list)
.filter(crate::schema::historical_event_collections_a_c::world_id.eq(world_id))
.load::<HistoricalEventCollectionAC>(conn)?
.grouped_by(db_list);
let hec_d_z_list = HistoricalEventCollectionDZ::belonging_to(db_list)
.filter(crate::schema::historical_event_collections_d_z::world_id.eq(world_id))
.load::<HistoricalEventCollectionDZ>(conn)?
.grouped_by(db_list);
let he_id_list = HECHEID::belonging_to(db_list)
.filter(crate::schema::hec_he_ids::world_id.eq(world_id))
.load::<HECHEID>(conn)?
.grouped_by(db_list);
let hec_related_list = HECRelatedID::belonging_to(db_list)
.filter(crate::schema::hec_related_ids::world_id.eq(world_id))
.load::<HECRelatedID>(conn)?
.grouped_by(db_list);
let indiv_merc_list = HECIndividualMerc::belonging_to(db_list)
.filter(crate::schema::hec_individual_mercs::world_id.eq(world_id))
.load::<HECIndividualMerc>(conn)?
.grouped_by(db_list);
let noncom_hf_id_list = HECNoncomHFID::belonging_to(db_list)
.filter(crate::schema::hec_noncom_hf_ids::world_id.eq(world_id))
.load::<HECNoncomHFID>(conn)?
.grouped_by(db_list);
let outcome_list = HECOutcome::belonging_to(db_list)
.filter(crate::schema::hec_outcomes::world_id.eq(world_id))
.load::<HECOutcome>(conn)?
.grouped_by(db_list);
let attack_hf_id_list = HECAttackingHFID::belonging_to(db_list)
.filter(crate::schema::hec_attacking_hf_ids::world_id.eq(world_id))
.load::<HECAttackingHFID>(conn)?
.grouped_by(db_list);
let defending_hf_id_list = HECDefendingHFID::belonging_to(db_list)
.filter(crate::schema::hec_defending_hf_ids::world_id.eq(world_id))
.load::<HECDefendingHFID>(conn)?
.grouped_by(db_list);
let a_support_merc_hf_id_list = HECASupportMercHFID::belonging_to(db_list)
.filter(crate::schema::hec_a_support_merc_hf_ids::world_id.eq(world_id))
.load::<HECASupportMercHFID>(conn)?
.grouped_by(db_list);
let d_support_merc_hf_id_list = HECDSupportMercHFID::belonging_to(db_list)
.filter(crate::schema::hec_d_support_merc_hf_ids::world_id.eq(world_id))
.load::<HECDSupportMercHFID>(conn)?
.grouped_by(db_list);
let mut core_list: Vec<df_st_core::HistoricalEventCollection> = Vec::new();
for (index, hec) in db_list.iter().enumerate() {
let mut core_hec = df_st_core::HistoricalEventCollection::new();
core_hec.add_missing_data(hec);
let hec_a_c = hec_a_c_list.get(index).unwrap().first().unwrap();
core_hec.add_missing_data(hec_a_c);
let hec_d_z = hec_d_z_list.get(index).unwrap().first().unwrap();
core_hec.add_missing_data(hec_d_z);
for he_id in he_id_list.get(index).unwrap() {
core_hec.he_ids.push(he_id.he_id);
}
for hec_id in hec_related_list.get(index).unwrap() {
core_hec.hec_ids.push(hec_id.hec_id);
}
for indiv_merc in indiv_merc_list.get(index).unwrap() {
core_hec.individual_merc.push(indiv_merc.individual_merc);
}
for noncom_hf_id in noncom_hf_id_list.get(index).unwrap() {
core_hec.noncom_hf_id.push(noncom_hf_id.noncom_hf_id);
}
for outcome in outcome_list.get(index).unwrap() {
core_hec.outcome.push(outcome.outcome.clone());
}
for attack_hf_id in attack_hf_id_list.get(index).unwrap() {
core_hec.attacking_hf_id.push(attack_hf_id.attacking_hf_id);
}
for defending_hf_id in defending_hf_id_list.get(index).unwrap() {
core_hec
.defending_hf_id
.push(defending_hf_id.defending_hf_id);
}
for support_merc in a_support_merc_hf_id_list.get(index).unwrap() {
core_hec
.a_support_merc_hf_id
.push(support_merc.a_support_merc_hf_id);
}
for support_merc in d_support_merc_hf_id_list.get(index).unwrap() {
core_hec
.d_support_merc_hf_id
.push(support_merc.d_support_merc_hf_id);
}
core_list.push(core_hec);
}
Ok(core_list)
}
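// Count collections grouped by the requested column, using the same
// world/id filters as `find_db_list`.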
fn get_count_from_db(
conn: &DbConnection,
id_filter: HashMap<String, i32>,
_string_filter: HashMap<String, String>,
offset: u32,
limit: u32,
group_by_opt: Option<String>,
id_list: Option<Vec<i32>>,
) -> Result<Vec<ItemCount>, Error> {
use crate::schema::historical_event_collections::dsl::*;
let query = historical_event_collections
.limit(limit as i64)
.offset(offset as i64);
let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
optional_filter! {
query, id_filter,
id_list => id,
[
"id" => id,
],
{group_by!{
group_by_opt, query, conn,
"id" => {id: i32},
"type" => {type_: Option<String>},
"start_year" => {start_year: Option<i32>},
"start_seconds72" => {start_seconds72: Option<i32>},
"end_year" => {end_year: Option<i32>},
"end_seconds72" => {end_seconds72: Option<i32>},
};},
};
}
}
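// Copy missing scalar fields from the core type into the DB model.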
impl Filler<HistoricalEventCollection, df_st_core::HistoricalEventCollection>
for HistoricalEventCollection
{
fn add_missing_data(&mut self, source: &df_st_core::HistoricalEventCollection) {
self.id.add_missing_data(&source.id);
self.type_.add_missing_data(&source.type_);
self.start_year.add_missing_data(&source.start_year);
self.start_seconds72
.add_missing_data(&source.start_seconds72);
self.end_year.add_missing_data(&source.end_year);
self.end_seconds72.add_missing_data(&source.end_seconds72);
}
}
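// And the reverse direction: fill a core collection from a DB row.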
impl Filler<df_st_core::HistoricalEventCollection, HistoricalEventCollection>
for df_st_core::HistoricalEventCollection
{
fn add_missing_data(&mut self, source: &HistoricalEventCollection) {
self.id.add_missing_data(&source.id);
self.type_.add_missing_data(&source.type_);
self.start_year.add_missing_data(&source.start_year);
self.start_seconds72
.add_missing_data(&source.start_seconds72);
self.end_year.add_missing_data(&source.end_year);
self.end_seconds72.add_missing_data(&source.end_seconds72);
}
}
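// Cross-type equality only compares ids, mirroring the `HashAndPartialEqById`
// derive on the DB model.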
impl PartialEq<HistoricalEventCollection> for df_st_core::HistoricalEventCollection {
fn eq(&self, other: &HistoricalEventCollection) -> bool {
self.id == other.id
}
}
impl PartialEq<df_st_core::HistoricalEventCollection> for HistoricalEventCollection {
fn eq(&self, other: &df_st_core::HistoricalEventCollection) -> bool {
self.id == other.id
}
}