Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

implement serde feature #14

Merged
merged 5 commits into from
Feb 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 76 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

8 changes: 7 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,9 +18,15 @@ default = ["max-index"]
crossover = ["genetic-rs/crossover"]
rayon = ["genetic-rs/rayon", "dep:rayon"]
max-index = []
serde = ["dep:serde", "dep:serde-big-array"]


[dependencies]
genetic-rs = "0.3"
rand = "0.8.5"
rayon = { version = "1.8.1", optional = true }
rayon = { version = "1.8.1", optional = true }
serde = { version = "1.0.197", features = ["derive"], optional = true }
serde-big-array = { version = "0.5.1", optional = true }

[dev-dependencies]
bincode = "1.3.3"
3 changes: 3 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,3 +18,6 @@ pub mod runnable;
pub use genetic_rs::prelude::*;
pub use runnable::*;
pub use topology::*;

#[cfg(feature = "serde")]
pub use nnt_serde::*;
153 changes: 147 additions & 6 deletions src/topology.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,85 @@ use std::{
use genetic_rs::prelude::*;
use rand::prelude::*;

#[cfg(feature = "serde")]
use serde::{Deserialize, Deserializer, Serialize, Serializer};

/// Contains useful structs for serializing/deserializing a [`NeuronTopology`]
#[cfg(feature = "serde")]
pub mod nnt_serde {
    use super::*;
    use serde::{Deserialize, Serialize};
    use serde_big_array::BigArray;

    /// A serializable wrapper for [`NeuronTopology`]. See [`NNTSerde::from`] for conversion.
    #[derive(Serialize, Deserialize)]
    pub struct NNTSerde<const I: usize, const O: usize> {
        // BigArray is required because serde's derive only supports
        // fixed-size arrays up to length 32 natively.
        #[serde(with = "BigArray")]
        pub(crate) input_layer: [NeuronTopology; I],

        pub(crate) hidden_layers: Vec<NeuronTopology>,

        #[serde(with = "BigArray")]
        pub(crate) output_layer: [NeuronTopology; O],

        pub(crate) mutation_rate: f32,
        pub(crate) mutation_passes: usize,
    }

    impl<const I: usize, const O: usize> From<&NeuralNetworkTopology<I, O>> for NNTSerde<I, O> {
        /// Takes an owned snapshot of every neuron so the wrapper can be
        /// serialized without holding any locks.
        fn from(value: &NeuralNetworkTopology<I, O>) -> Self {
            let input_layer = value
                .input_layer
                .iter()
                .map(|n| n.read().unwrap().clone())
                .collect::<Vec<_>>()
                .try_into()
                .unwrap();

            let hidden_layers = value
                .hidden_layers
                .iter()
                .map(|n| n.read().unwrap().clone())
                .collect();

            let output_layer = value
                .output_layer
                .iter()
                .map(|n| n.read().unwrap().clone())
                .collect::<Vec<_>>()
                .try_into()
                .unwrap();

            Self {
                input_layer,
                hidden_layers,
                output_layer,
                mutation_rate: value.mutation_rate,
                mutation_passes: value.mutation_passes,
            }
        }
    }

    #[test]
    fn serde() {
        let mut rng = rand::thread_rng();
        let nnt = NeuralNetworkTopology::<10, 10>::new(0.1, 3, &mut rng);
        let nnts = NNTSerde::from(&nnt);

        let encoded = bincode::serialize(&nnts).unwrap();

        if option_env!("TEST_CREATEFILE").is_some() {
            std::fs::write("serde-test.nn", &encoded).unwrap();
        }

        let decoded: NNTSerde<10, 10> = bincode::deserialize(&encoded).unwrap();
        let nnt2: NeuralNetworkTopology<10, 10> = decoded.into();

        // bincode output is deterministic, so a byte-identical re-encoding
        // proves the round trip preserved every field.
        let reencoded = bincode::serialize(&NNTSerde::from(&nnt2)).unwrap();
        assert_eq!(encoded, reencoded, "serde round trip should preserve the topology");
    }
}

/// Creates an [`ActivationFn`] object from a function
#[macro_export]
macro_rules! activation_fn {
Expand Down Expand Up @@ -374,14 +453,42 @@ impl<const I: usize, const O: usize> DivisionReproduction for NeuralNetworkTopol
}
}

/*
#[cfg(feature = "crossover")]
impl CrossoverReproduction for NeuralNetworkTopology {
fn crossover(&self, other: &Self, rng: &mut impl Rng) -> Self {
todo!();
#[cfg(feature = "serde")]
impl<const I: usize, const O: usize> From<nnt_serde::NNTSerde<I, O>>
    for NeuralNetworkTopology<I, O>
{
    /// Rebuilds a live [`NeuralNetworkTopology`] from its serialized form,
    /// re-wrapping every neuron in `Arc<RwLock<..>>` for shared access.
    fn from(value: nnt_serde::NNTSerde<I, O>) -> Self {
        // Non-capturing closure, reusable across all three layers.
        let wrap = |n: NeuronTopology| Arc::new(RwLock::new(n));

        NeuralNetworkTopology {
            // `array::map` converts the fixed-size arrays in place — no
            // intermediate Vec allocation and no `try_into().unwrap()`
            // panic path.
            input_layer: value.input_layer.map(wrap),
            hidden_layers: value.hidden_layers.into_iter().map(wrap).collect(),
            output_layer: value.output_layer.map(wrap),
            mutation_rate: value.mutation_rate,
            mutation_passes: value.mutation_passes,
        }
    }
}
*/

/// An activation function object that implements [`fmt::Debug`] and is [`Send`]
#[derive(Clone)]
Expand All @@ -397,6 +504,38 @@ impl fmt::Debug for ActivationFn {
}
}

#[cfg(feature = "serde")]
// An `ActivationFn` is serialized purely by its name: the wrapped function
// pointer cannot be serialized, so `Deserialize` must resolve the name back
// to a known function.
impl Serialize for ActivationFn {
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(&self.name)
    }
}

#[cfg(feature = "serde")]
impl<'a> Deserialize<'a> for ActivationFn {
    /// Deserializes an [`ActivationFn`] by name, resolving it against the
    /// built-in activation functions.
    ///
    /// # Errors
    /// Returns a deserialization error (rather than panicking) when the name
    /// does not match a built-in function — custom activation functions are
    /// currently not supported.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'a>,
    {
        let name = String::deserialize(deserializer)?;

        // The built-in activations that can be resolved by name.
        // TODO: replace with a proper activation-fn registry so custom
        // functions can be registered and round-tripped.
        let activations = activation_fn! {
            sigmoid,
            relu,
            f32::tanh,
            linear_activation
        };

        for a in activations {
            if a.name == name {
                return Ok(a);
            }
        }

        // Surface an error through serde instead of aborting the process:
        // a panic inside `deserialize` would crash callers that expected a
        // recoverable `Result`.
        Err(serde::de::Error::custom(format!(
            "unknown activation function `{}`; custom activation functions are currently not supported",
            name
        )))
    }
}

/// The sigmoid activation function.
pub fn sigmoid(n: f32) -> f32 {
1. / (1. + std::f32::consts::E.powf(-n))
Expand All @@ -414,6 +553,7 @@ pub fn linear_activation(n: f32) -> f32 {

/// A stateless version of [`Neuron`][crate::Neuron].
#[derive(Debug, Clone)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub struct NeuronTopology {
/// The input locations and weights.
pub inputs: Vec<(NeuronLocation, f32)>,
Expand Down Expand Up @@ -473,6 +613,7 @@ impl NeuronTopology {

/// A pseudo-pointer of sorts used to make structural conversions very fast and easy to write.
#[derive(Hash, Clone, Copy, Debug, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
pub enum NeuronLocation {
/// Points to a neuron in the input layer at contained index.
Input(usize),
Expand Down