1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
//! Import and deserialize the checkpoint or exported files

use std::fs::{File, self};
use std::io::Read;

use bevy::prelude::*;
use bevy_rapier2d::prelude::ImpulseJoint;
use serde_json;

use crate::blob::blob::Blob;
use crate::blob::geno_blob_builder::GenoBlobBuilder;
use crate::brain::resource::BevyBlockNeurons;
use crate::componet::ColliderFlag;
use crate::consts::*;
use crate::physics::world::Wall;

use super::export::ExportFile;

/// load blobs from an exported file or checkpoints file
/// Load blobs from an exported file or checkpoint file when the load key
/// (`LOAD_ALL_BLOBS_FROM_JSON`) is just pressed.
///
/// The file read is `LOAD_FNAME`, unless `LOAD_NEWEST_FILE` is set, in which
/// case the lexicographically greatest `*.json` filename inside `LOAD_FOLDER`
/// is used instead (see [`newest_file_name_in_directory`]).
///
/// # Panics
/// Panics if `LOAD_NEWEST_FILE` is set but `LOAD_FOLDER` contains no
/// `.json` file (or cannot be read).
pub fn load_blobs(
    commands: Commands,
    mut bbn: ResMut<BevyBlockNeurons>,
    input: Res<Input<KeyCode>>,
) {
    let mut load_fname = LOAD_FNAME.to_string();
    if LOAD_NEWEST_FILE {
        let path = newest_file_name_in_directory(LOAD_FOLDER);
        if let Some(path) = path {
            load_fname = LOAD_FOLDER.to_string() + &path;
        } else {
            panic!("empty load folder")
        }
    }

    if input.just_pressed(LOAD_ALL_BLOBS_FROM_JSON) {
        match File::open(&load_fname) {
            Ok(mut file) => {
                let mut file_str = String::new();

                // Handle read_to_string error
                if let Err(e) = file.read_to_string(&mut file_str) {
                    warn!("Failed to read from file {}: {:?}", load_fname, e);
                    return;
                }

                // Handle serde_json parsing error
                match serde_json::from_str::<ExportFile>(&file_str) {
                    Ok(ef) => {
                        ef.check();
                        overwrite(ef, commands, &mut bbn);
                    }
                    Err(e) => {
                        warn!("Failed to parse the file content as `ExportFile`: {:?}", e);
                    }
                }
            }
            Err(e) => {
                // Fix: report the file we actually tried to open (`load_fname`),
                // not the constant `LOAD_FNAME` — they differ when
                // `LOAD_NEWEST_FILE` is enabled.
                warn!("Failed to open file {}: {:?}", load_fname, e);
            }
        }
    }
}


/// despawn all the entities relate to blob
/// 
/// clean resources
/// Despawn every blob-related entity: blob roots, colliders that are not
/// walls, and impulse joints.
///
/// Also clears the [`BevyBlockNeurons`] resource so no stale neural
/// networks survive. Runs when either the load key or the clean key was
/// just pressed, so a load always starts from an empty world.
pub fn clean(
    mut commands: Commands,
    mut bbn: ResMut<BevyBlockNeurons>,
    blob_q: Query<Entity, With<Blob>>,
    collider_q: Query<Entity, (With<ColliderFlag>, Without<Wall>)>,
    joint_q: Query<Entity, With<ImpulseJoint>>,
    input: Res<Input<KeyCode>>,
) {
    let triggered = input.just_pressed(LOAD_ALL_BLOBS_FROM_JSON)
        || input.just_pressed(CLEAN_ALL_BLOBS_KEYCODE);
    if !triggered {
        return;
    }

    let doomed = blob_q
        .iter()
        .chain(collider_q.iter())
        .chain(joint_q.iter());
    for e in doomed {
        commands.entity(e).despawn();
    }

    // Drop the stored neural networks along with their entities.
    bbn.clear();
}

/// ignore and overwrite all blobs and NNs that exist
fn overwrite(mut ef: ExportFile, commands: Commands, bbn: &mut BevyBlockNeurons) {
    let mut builder = GenoBlobBuilder::from_commands(commands, &mut bbn.nnvec);

    // build loaded blobs
    for (geno, pos, _nnvec) in ef.iter_mut() {
        // println!("{:#?}",geno);
        builder.build(geno, *pos);
        // println!("\n{:#?}",geno);
    }

    // set resource
    bbn.nnvec = ef.flatten_nnvec();
}

/// take folder path as input, return fname
fn newest_file_name_in_directory(dir: &str) -> Option<String> {
    fs::read_dir(dir)
        .ok()?
        .filter_map(|entry| {
            let entry = entry.ok()?;
            if entry.path().extension()? == "json" {
                entry.path().file_name()?.to_str().map(String::from)
            } else {
                None
            }
        })
        .max()
}