use crate::db_object::{DBObject, OrderTypes};
use crate::df_world::{Coordinate, DBDFWorld};
use crate::schema::mountain_peaks;
use crate::DbConnection;
use df_st_core::fillable::{Fillable, Filler};
use df_st_core::item_count::ItemCount;
use df_st_derive::{Fillable, HashAndPartialEqById};
use diesel::expression_methods::ExpressionMethods;
use diesel::prelude::*;
use diesel::query_dsl::RunQueryDsl;
use diesel::Queryable;
use failure::Error;
#[allow(unused_imports)]
use log::{debug, error, info, trace, warn};
use std::collections::HashMap;
use std::hash::{BuildHasher, Hash, Hasher};

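/// Database representation of a mountain peak.
///
/// Rows are identified by the composite key `(id, world_id)`; the optional
/// `coord_id` links to a row in the `coordinates` table.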
#[derive(
    Clone,
    Debug,
    AsChangeset,
    Identifiable,
    HashAndPartialEqById,
    Queryable,
    Insertable,
    Fillable,
    Default,
)]
#[table_name = "mountain_peaks"]
pub struct MountainPeak {
    pub id: i32,
    pub world_id: i32,
    pub name: Option<String>,
    pub coord_id: Option<i32>,
    pub height: Option<i32>,
    pub is_volcano: Option<bool>,
}

impl MountainPeak {
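    /// Create a `MountainPeak` with all fields set to their defaults.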
    pub fn new() -> Self {
        Self::default()
    }
}

impl DBObject<df_st_core::MountainPeak, MountainPeak> for MountainPeak {
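    /// Create a `Coordinate` row for every peak that has a coordinate and link
    /// it back via `coord_id`. Peaks are looked up in `world.mountain_peaks`
    /// by hashing them with the map's own hasher (presumably the same hash
    /// used when the peaks were inserted).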
    fn add_missing_data_advanced(core_world: &df_st_core::DFWorld, world: &mut DBDFWorld) {
        for mountain_peak in core_world.mountain_peaks.values() {
            // Hash the mountain peak; the hash is its key in world.mountain_peaks.
            let mut hasher = world.mountain_peaks.hasher().build_hasher();
            mountain_peak.hash(&mut hasher);
            let hash = hasher.finish();

            if let Some(coord) = &mountain_peak.coord {
                let new_id: i32 = world.coordinates.len() as i32;
                world.coordinates.push(Coordinate {
                    id: new_id,
                    x: coord.x,
                    y: coord.y,
                    ..Default::default()
                });
                if let Some(db_mountain_peak) = world.mountain_peaks.get_mut(&hash) {
                    db_mountain_peak.coord_id = Some(new_id);
                } else {
                    warn!("MountainPeak not found, Can not update coord_id. Please report this.");
                }
            }
        }
    }

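    /// Insert mountain peaks, updating the non-key columns when a row with the
    /// same `(id, world_id)` already exists (PostgreSQL upsert).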
    #[cfg(feature = "postgres")]
    fn insert_into_db(conn: &DbConnection, mountain_peaks: &[MountainPeak]) {
        use diesel::pg::upsert::excluded;
        diesel::insert_into(mountain_peaks::table)
            .values(mountain_peaks)
            .on_conflict((mountain_peaks::id, mountain_peaks::world_id))
            .do_update()
            .set((
                mountain_peaks::name.eq(excluded(mountain_peaks::name)),
                mountain_peaks::coord_id.eq(excluded(mountain_peaks::coord_id)),
                mountain_peaks::height.eq(excluded(mountain_peaks::height)),
                mountain_peaks::is_volcano.eq(excluded(mountain_peaks::is_volcano)),
            ))
            .execute(conn)
            .expect("Error saving mountain_peaks");
    }

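    /// Plain batch insert, used when the `postgres` feature (and its upsert
    /// support) is disabled.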
    #[cfg(not(feature = "postgres"))]
    fn insert_into_db(conn: &DbConnection, mountain_peaks: &[MountainPeak]) {
        diesel::insert_into(mountain_peaks::table)
            .values(mountain_peaks)
            .execute(conn)
            .expect("Error saving mountain_peaks");
    }

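    /// Look up a single peak by the `"world_id"` and `"id"` entries of
    /// `id_filter`; missing keys default to `0`.
    ///
    /// Illustrative call, assuming `conn` is an established `DbConnection`
    /// (a sketch, not taken from the original source):
    ///
    /// ```ignore
    /// let mut id_filter = HashMap::new();
    /// id_filter.insert("world_id".to_string(), 1);
    /// id_filter.insert("id".to_string(), 42);
    /// let peak = MountainPeak::find_db_item(&conn, id_filter)?;
    /// ```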
    fn find_db_item(
        conn: &DbConnection,
        id_filter: HashMap<String, i32>,
    ) -> Result<Option<MountainPeak>, Error> {
        use crate::schema::mountain_peaks::dsl::*;
        let query = mountain_peaks;
        let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
        let query = query.filter(id.eq(id_filter.get("id").unwrap_or(&0)));
        Ok(query.first::<MountainPeak>(conn).optional()?)
    }

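    /// Return a page of peaks for a world, optionally restricted by `id_list`
    /// and an `"id"` filter, and ordered by one of the orderable columns via
    /// the crate's `optional_filter!` and `order_by!` macros.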
    fn find_db_list(
        conn: &DbConnection,
        id_filter: HashMap<String, i32>,
        _string_filter: HashMap<String, String>,
        offset: i64,
        limit: i64,
        order: Option<OrderTypes>,
        order_by: Option<String>,
        id_list: Option<Vec<i32>>,
    ) -> Result<Vec<MountainPeak>, Error> {
        use crate::schema::mountain_peaks::dsl::*;
        let (order_by, asc) = Self::get_order(order, order_by);
        let query = mountain_peaks.limit(limit).offset(offset);
        let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
        optional_filter! {
            query, id_filter,
            id_list => id,
            [
                "id" => id,
            ],
            {Ok(order_by!{
                order_by, asc, query, conn,
                "id" => id,
                "name" => name,
                "height" => height,
                "is_volcano" => is_volcano,
            })},
        }
    }

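    /// Map an API field name onto a database column name, falling back to
    /// `"id"` for anything unrecognised.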
    fn match_field_by(field: String) -> String {
        match field.as_ref() {
            "name" => "name",
            "height" => "height",
            "is_volcano" => "is_volcano",
            _ => "id",
        }
        .to_owned()
    }

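    /// Attach the coordinate belonging to each peak: coordinates are joined on
    /// `coord_id` for the peaks in `db_list` and copied into the matching
    /// entries of `core_list`.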
    fn add_nested_items(
        conn: &DbConnection,
        db_list: &[MountainPeak],
        mut core_list: Vec<df_st_core::MountainPeak>,
    ) -> Result<Vec<df_st_core::MountainPeak>, Error> {
        use crate::schema::coordinates;
        let world_id = match db_list.first() {
            Some(x) => x.world_id,
            None => 0,
        };
        let mountain_peak_ids: Vec<i32> = db_list
            .iter()
            .map(|mountain_peak| mountain_peak.id)
            .collect();
        // Add coordinates
        let coord_list = coordinates::table
            .inner_join(
                mountain_peaks::table.on(coordinates::id.nullable().eq(mountain_peaks::coord_id)),
            )
            .filter(crate::schema::coordinates::world_id.eq(world_id))
            .filter(mountain_peaks::id.eq_any(mountain_peak_ids))
            .load::<(Coordinate, MountainPeak)>(conn)?;

        core_list = core_list
            .into_iter()
            .map(|mut mountain_peak| {
                let mut coord: Option<df_st_core::Coordinate> = None;
                for (coord_new, mp) in &coord_list {
                    if &mountain_peak == mp {
                        coord.add_missing_data(&Some(coord_new.clone()));
                        break;
                    }
                }
                mountain_peak.coord = coord;
                mountain_peak
            })
            .collect();
        Ok(core_list)
    }

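    /// Count peaks, optionally grouped by `"id"`, `"name"`, `"height"` or
    /// `"is_volcano"`. Filtering mirrors `find_db_list`; grouping and counting
    /// are delegated to the crate's `group_by!` macro.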
    fn get_count_from_db(
        conn: &DbConnection,
        id_filter: HashMap<String, i32>,
        _string_filter: HashMap<String, String>,
        offset: u32,
        limit: u32,
        group_by_opt: Option<String>,
        id_list: Option<Vec<i32>>,
    ) -> Result<Vec<ItemCount>, Error> {
        use crate::schema::mountain_peaks::dsl::*;
        let query = mountain_peaks.limit(limit as i64).offset(offset as i64);
        let query = query.filter(world_id.eq(id_filter.get("world_id").unwrap_or(&0)));
        optional_filter! {
            query, id_filter,
            id_list => id,
            [
                "id" => id,
            ],
            {group_by!{
                group_by_opt, query, conn,
                "id" => {id: i32},
                "name" => {name: Option<String>},
                "height" => {height: Option<i32>},
                "is_volcano" => {is_volcano: Option<bool>},
            };},
        };
    }
}

/// From Core to DB
impl Filler<MountainPeak, df_st_core::MountainPeak> for MountainPeak {
    fn add_missing_data(&mut self, source: &df_st_core::MountainPeak) {
        self.id.add_missing_data(&source.id);
        self.name.add_missing_data(&source.name);
        self.height.add_missing_data(&source.height);
        self.is_volcano.add_missing_data(&source.is_volcano);
    }
}

/// From DB to Core
impl Filler<df_st_core::MountainPeak, MountainPeak> for df_st_core::MountainPeak {
    fn add_missing_data(&mut self, source: &MountainPeak) {
        self.id.add_missing_data(&source.id);
        self.name.add_missing_data(&source.name);
        self.height.add_missing_data(&source.height);
        self.is_volcano.add_missing_data(&source.is_volcano);
    }
}

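/// Cross-type equality: a core and a DB mountain peak are considered equal
/// when their `id`s match (used when pairing joined rows in `add_nested_items`).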
impl PartialEq<MountainPeak> for df_st_core::MountainPeak {
    fn eq(&self, other: &MountainPeak) -> bool {
        self.id == other.id
    }
}

impl PartialEq<df_st_core::MountainPeak> for MountainPeak {
    fn eq(&self, other: &df_st_core::MountainPeak) -> bool {
        self.id == other.id
    }
}