Fix building with latest rust. #3

Open · wants to merge 3 commits into base: master
1 change: 1 addition & 0 deletions Cargo.toml
@@ -7,6 +7,7 @@ authors = [ "[email protected]" ]
[dependencies]
wheel_timer = "*"
rand = "*"
rand_hc = "*"
vec_map = "*"
num = "*"

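Note on the new dependency: newer rand releases changed the `SeedableRng` API, so the old slice-seeded `StdRng` setup in the benches and examples no longer compiles; the diff switches them to an explicitly named RNG from `rand_hc`. A minimal sketch of the replacement seeding pattern, assuming a rand 0.6-era API as the call sites below suggest:

```rust
extern crate rand;
extern crate rand_hc;

use rand::prelude::*;   // Rng, SeedableRng, etc.
use rand_hc::Hc128Rng;  // explicit, reproducible RNG

fn main() {
    // Replaces the old `StdRng::from_seed(&[1, 2, 3, 4])`; the same u64 seed
    // produces the same stream on every run and platform.
    let mut rng = Hc128Rng::seed_from_u64(1234);
    let x: f64 = rng.gen();
    println!("{}", x);
}
```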
10 changes: 5 additions & 5 deletions benches/bench_network.rs
@@ -1,14 +1,15 @@
#![feature(test)]
#![feature(convert)]

extern crate test;
extern crate neural;
extern crate rand;
extern crate rand_hc;

use test::Bencher;

use std::default::Default;
use rand::{Rng, SeedableRng, StdRng};
use rand::prelude::*;
use rand_hc::Hc128Rng;

use neural::Network;
use neural::izhikevich::{IzhikevichNeuron, IzhikevichConfig};
@@ -89,8 +90,7 @@ fn bench_network_tick_all_to_all(bn: &mut Bencher) {

#[bench]
fn bench_network_tick_limited(bn: &mut Bencher) {
let seed: &[_] = &[1, 2, 3, 4];
let mut rng: StdRng = SeedableRng::from_seed(seed);
let mut rng = Hc128Rng::seed_from_u64(1234);
let mut network = Network::new(20);

let total_count = 100;
@@ -119,7 +119,7 @@ fn bench_network_tick_limited(bn: &mut Bencher) {
let mut i = 0;

while i < connectivity {
let m = rng.gen_range::<usize>(0, total_count);
let m: usize = rng.gen_range(0, total_count);
if n == m { // try again
continue;
}
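The `gen_range` call sites change shape as well: with the newer rand signature, the single-type turbofish no longer applies, so the element type moves onto the `let` binding. A small sketch of the rejection loop these benches and examples use, under the same rand/rand_hc assumptions as above (the helper name is illustrative):

```rust
extern crate rand;
extern crate rand_hc;

use rand::prelude::*;
use rand_hc::Hc128Rng;

/// Pick a postsynaptic index in [0, total_count) that differs from `n`,
/// mirroring the `while i < connectivity { ... }` loops in this PR.
/// Assumes total_count > 1 so the loop terminates.
fn pick_target(rng: &mut Hc128Rng, n: usize, total_count: usize) -> usize {
    loop {
        let m: usize = rng.gen_range(0, total_count);
        if m != n {
            return m;
        }
    }
}

fn main() {
    let mut rng = Hc128Rng::seed_from_u64(1234);
    println!("{}", pick_target(&mut rng, 3, 100));
}
```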
7 changes: 0 additions & 7 deletions examples/compare.rs
@@ -1,19 +1,12 @@
#![feature(convert)]

extern crate neural;
extern crate rand;

use std::default::Default;
// use rand::{Rng, SeedableRng, StdRng};

use neural::Network;
use neural::izhikevich::{IzhikevichNeuron, IzhikevichConfig};
use neural::sym::{SymSynapse, SymConfig};

fn main() {
// let seed: &[_] = &[1, 2, 3, 4];
// let mut rng: StdRng = SeedableRng::from_seed(seed);
// let mut rng = rand::thread_rng();

let mut network = Network::<IzhikevichNeuron, SymSynapse>::new(20);
let duration = 20.0;
13 changes: 6 additions & 7 deletions examples/profile.rs
@@ -1,4 +1,3 @@
#![feature(convert)]

extern crate neural;
extern crate csv;
@@ -18,12 +17,12 @@ fn main() {
fs::create_dir_all(&path).ok();

let filepath_profile = path.join("profile.csv");
let mut writer_profile = csv::Writer::from_file(filepath_profile.as_path()).unwrap();
writer_profile.encode(("t", "i")).ok();
let mut writer_profile = csv::Writer::from_path(filepath_profile.as_path()).unwrap();
writer_profile.serialize(("t", "i")).ok();

let filepath_rate = path.join("profile_rate.csv");
let mut writer_rate = csv::Writer::from_file(filepath_rate.as_path()).unwrap();
writer_rate.encode(("t", "rate")).ok();
let mut writer_rate = csv::Writer::from_path(filepath_rate.as_path()).unwrap();
writer_rate.serialize(("t", "rate")).ok();

let mut network = Network::new(20);

@@ -89,11 +88,11 @@ fn main() {

let rate = oup.iter().filter(|&x| *x > 0.0).count();
println!("{:?}", (now, rate));
writer_rate.encode((now, rate)).unwrap();
writer_rate.serialize((now, rate)).unwrap();

for (n, &i) in oup.iter().enumerate() {
if i > 0.0 {
writer_profile.encode((now, n)).unwrap();
writer_profile.serialize((now, n)).unwrap();
}
}
}
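All the csv writer changes follow one pattern: csv 1.x renamed `Writer::from_file` to `Writer::from_path` and replaced `encode` with the serde-backed `serialize`. A minimal sketch of the writer usage the examples and tests now rely on (the file name and helper here are illustrative only):

```rust
extern crate csv;

use std::path::Path;

fn write_rows(path: &Path) -> csv::Result<()> {
    // csv 1.x: from_path creates the file and returns a Writer over it.
    let mut writer = csv::Writer::from_path(path)?;
    writer.serialize(("t", "rate"))?;       // header row, as in the examples
    writer.serialize((1.0_f64, 42_usize))?; // one data record
    writer.flush()?;                        // flush buffered rows to disk
    Ok(())
}

fn main() {
    write_rows(Path::new("profile_rate.csv")).unwrap();
}
```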
10 changes: 4 additions & 6 deletions examples/rate.rs
@@ -1,11 +1,10 @@
#![feature(iter_arith)]
#![feature(convert)]

extern crate neural;
extern crate csv;
extern crate rand;
extern crate rand_hc;

use rand::{Rng, SeedableRng, StdRng};
use rand::prelude::*;
use rand_hc::Hc128Rng;

use neural::Network;
use neural::izhikevich::{IzhikevichNeuron, IzhikevichConfig};
@@ -30,8 +29,7 @@ fn main() {
let inp = vinp.as_mut_slice();
let oup = voup.as_mut_slice();

let seed: &[_] = &[1, 2, 3, 4];
let mut rng: StdRng = SeedableRng::from_seed(seed);
let mut rng = Hc128Rng::seed_from_u64(1234);

let rate = 500.0;
let dt = 1.0 / 1000.0; // 1ms
27 changes: 13 additions & 14 deletions examples/spikes.rs
@@ -1,14 +1,14 @@
#![feature(convert)]

extern crate neural;
extern crate rand;
extern crate rand_hc;
extern crate csv;

use std::default::Default;
use std::path::Path;
use std::fs;
use rand::{Rng, SeedableRng, StdRng};
use rand::distributions::{Normal, IndependentSample};
use rand::prelude::*;
use rand_hc::Hc128Rng;
use rand::distributions::Normal;

use neural::Float;
use neural::Network;
@@ -22,15 +22,14 @@ fn main() {
fs::create_dir_all(&path).ok();

let filepath_spikes = path.join("spikes.csv");
let mut writer_spikes = csv::Writer::from_file(filepath_spikes.as_path()).unwrap();
writer_spikes.encode(("t", "i")).ok();
let mut writer_spikes = csv::Writer::from_path(filepath_spikes.as_path()).unwrap();
writer_spikes.serialize(("t", "i")).ok();

let filepath_rate = path.join("spikes_rate.csv");
let mut writer_rate = csv::Writer::from_file(filepath_rate.as_path()).unwrap();
writer_rate.encode(("t", "rate")).ok();
let mut writer_rate = csv::Writer::from_path(filepath_rate.as_path()).unwrap();
writer_rate.serialize(("t", "rate")).ok();

let seed: &[_] = &[1, 2, 3, 4];
let mut rng: StdRng = SeedableRng::from_seed(seed);
let mut rng = Hc128Rng::seed_from_u64(1234);
let mut network = Network::new(20);

let duration = 1000.0;
@@ -117,9 +116,9 @@ fn main() {
for n in 0..total_count {
// thalmic input
let i = if n < excitatory_count {
5.0 * norm.ind_sample(&mut rng)
5.0 * norm.sample(&mut rng)
} else {
2.0 * norm.ind_sample(&mut rng)
2.0 * norm.sample(&mut rng)
};

inp[n] = i as Float;
@@ -134,11 +133,11 @@

let rate = oup.iter().filter(|&x| *x > 0.0).count();
println!("{:?}", (now, rate));
writer_rate.encode((now, rate)).unwrap();
writer_rate.serialize((now, rate)).unwrap();

for (n, &i) in oup.iter().enumerate() {
if i > 0.0 {
writer_spikes.encode((now, n)).unwrap();
writer_spikes.serialize((now, n)).unwrap();
}
}
}
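The thalamic-input noise moves from the old `IndependentSample::ind_sample` to the `Distribution::sample` method that `rand::prelude::*` brings into scope. A brief sketch, assuming the targeted rand version still exports `rand::distributions::Normal` as this diff imports it:

```rust
extern crate rand;
extern crate rand_hc;

use rand::prelude::*;             // includes the Distribution trait
use rand::distributions::Normal;
use rand_hc::Hc128Rng;

fn main() {
    let mut rng = Hc128Rng::seed_from_u64(1234);
    let norm = Normal::new(0.0, 1.0); // standard normal, as in spikes.rs

    // Excitatory neurons receive stronger thalamic noise than inhibitory ones.
    let excitatory_input = 5.0 * norm.sample(&mut rng);
    let inhibitory_input = 2.0 * norm.sample(&mut rng);
    println!("{} {}", excitatory_input, inhibitory_input);
}
```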
27 changes: 13 additions & 14 deletions examples/stdp.rs
@@ -1,13 +1,13 @@
#![feature(convert)]

extern crate neural;
extern crate rand;
extern crate rand_hc;
extern crate csv;

use std::default::Default;
use std::path::Path;
use std::fs;
use rand::{Rng, SeedableRng, StdRng};
use rand::prelude::*;
use rand_hc::Hc128Rng;

use neural::Network;
use neural::izhikevich::{IzhikevichNeuron, IzhikevichConfig};
@@ -20,15 +20,14 @@ fn main() {
fs::create_dir_all(&path).ok();

let filepath_spikes = path.join("stdp.csv");
let mut writer_spikes = csv::Writer::from_file(filepath_spikes.as_path()).unwrap();
writer_spikes.encode(("t", "i")).ok();
let mut writer_spikes = csv::Writer::from_path(filepath_spikes.as_path()).unwrap();
writer_spikes.serialize(("t", "i")).ok();

let filepath_rate = path.join("stdp_rate.csv");
let mut writer_rate = csv::Writer::from_file(filepath_rate.as_path()).unwrap();
writer_rate.encode(("t", "rate")).ok();
let mut writer_rate = csv::Writer::from_path(filepath_rate.as_path()).unwrap();
writer_rate.serialize(("t", "rate")).ok();

let seed: &[_] = &[1, 2, 3, 4];
let mut rng: StdRng = SeedableRng::from_seed(seed);
let mut rng = Hc128Rng::seed_from_u64(1234);
let mut network = Network::new(20);

let duration = 1000.0;
@@ -82,7 +81,7 @@ fn main() {
let mut i = 0;

while i < connectivity {
let m = rng.gen_range::<usize>(0, total_count);
let m: usize = rng.gen_range(0, total_count);
if n == m { // try again
continue;
}
@@ -93,8 +92,8 @@
-5.0
};

let delay = if n < excitatory_count {
rng.gen_range::<usize>(1, max_delay)
let delay: usize = if n < excitatory_count {
rng.gen_range(1, max_delay)
} else {
1
};
@@ -145,11 +144,11 @@ fn main() {

let rate = oup.iter().filter(|&x| *x > 0.0).count();
println!("{:?}", (now, rate));
writer_rate.encode((now, rate)).unwrap();
writer_rate.serialize((now, rate)).unwrap();

for (n, &i) in oup.iter().enumerate() {
if i > 0.0 {
writer_spikes.encode((now, n)).unwrap();
writer_spikes.serialize((now, n)).unwrap();
}
}
}
12 changes: 6 additions & 6 deletions src/network.rs
@@ -66,7 +66,7 @@ impl<N: Neuron, S: Synapse> Network<N, S> {
}

pub fn add_synapse(&mut self, synapse: S, sendr_id: usize, recvr_id: usize) -> Result<usize, NeuralError> {
if !self.neurons.contains_key(&sendr_id) || !self.neurons.contains_key(&recvr_id) {
if !self.neurons.contains_key(sendr_id) || !self.neurons.contains_key(recvr_id) {
return Err(NeuralError::MissingNeuron)
}

@@ -118,7 +118,7 @@ impl<N: Neuron, S: Synapse> Network<N, S> {
for current_tick in 0..ticks {
let spikes = self.scheduler.tick();
for spike in spikes.iter() {
if let Some(neuron) = self.neurons.get_mut(&spike.recvr_id) {
if let Some(neuron) = self.neurons.get_mut(spike.recvr_id) {
neuron.recv(spike.v);
}
}
@@ -138,10 +138,10 @@

// On the incoming (receiving synapses), update them post-receival
if self.learning_enabled {
if let Some(recv_synapses) = self.recv_synapses.get_mut(&sendr_id) {
if let Some(recv_synapses) = self.recv_synapses.get_mut(sendr_id) {
// println!("recv_synapses: sendr_id: {:?} recv_synapses: {:?}", sendr_id, recv_synapses.len());
for synapse_id in recv_synapses.iter() {
if let Some(synapse) = self.synapses.get_mut(&synapse_id) {
if let Some(synapse) = self.synapses.get_mut(*synapse_id) {
synapse.post_recv(self.now);
// post_recv_count += 1;
}
@@ -150,10 +150,10 @@
}

// On the outgoing (sending synapses), update them pre-receival
if let Some(send_synapses) = self.send_synapses.get_mut(&sendr_id) {
if let Some(send_synapses) = self.send_synapses.get_mut(sendr_id) {
// println!("send_synapses: sendr_id: {:?} send_synapses: {:?}", sendr_id, send_synapses.len());
for &(recvr_id, synapse_id) in send_synapses.iter() {
if let Some(synapse) = self.synapses.get_mut(&synapse_id) {
if let Some(synapse) = self.synapses.get_mut(synapse_id) {
if self.learning_enabled {
synapse.pre_recv(self.now);
// pre_recv_count += 1;
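The `&`-dropping in network.rs comes from `vec_map::VecMap`, whose lookup methods take the `usize` key by value rather than by reference the way `HashMap` does. A small sketch of the calling convention, assuming the vec_map version resolved by the wildcard dependency:

```rust
extern crate vec_map;

use vec_map::VecMap;

fn main() {
    let mut neurons: VecMap<f64> = VecMap::new();
    neurons.insert(7, -65.0); // neuron id 7, resting potential

    let recvr_id = 7;
    // Keys are passed by value: `contains_key(recvr_id)`, not `contains_key(&recvr_id)`.
    if neurons.contains_key(recvr_id) {
        if let Some(v) = neurons.get_mut(recvr_id) {
            *v += 5.0; // deliver a spike
        }
    }
    println!("{:?}", neurons.get(recvr_id));
}
```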
4 changes: 2 additions & 2 deletions tests/test_network.rs
@@ -45,7 +45,7 @@ fn test_network() {
let now = network.tick(1, &mut inp, &mut oup);
assert_eq!(now, 2.0);
assert_eq!(oup[0], 0.0);
assert_eq!(oup[1], 0.0);
assert_eq!(oup[1], 30.0);
}

{
@@ -54,7 +54,7 @@ fn test_network() {
let now = network.tick(1, &mut inp, &mut oup);
assert_eq!(now, 3.0);
assert_eq!(oup[0], 0.0);
assert_eq!(oup[1], 30.0);
assert_eq!(oup[1], 0.0);
}

{
6 changes: 3 additions & 3 deletions tests/test_neuron.rs
@@ -32,8 +32,8 @@ fn run(t: Test) {

let filepath = path.join(&format!("{}.csv", t.name));

let mut writer = csv::Writer::from_file(&filepath.as_path()).unwrap();
writer.encode(("t", "I", "V", "u", "spike")).ok();
let mut writer = csv::Writer::from_path(&filepath.as_path()).unwrap();
writer.serialize(("t", "I", "V", "u", "spike")).ok();

while now < t.timespan {
let ip = (t.input)(now);
@@ -46,7 +46,7 @@ fn run(t: Test) {
spikes = spikes + 1;
}

writer.encode((now, ip, neuron.v, neuron.u, spike)).ok();
writer.serialize((now, ip, neuron.v, neuron.u, spike)).ok();

now = now + t.tau;
}
12 changes: 6 additions & 6 deletions tests/test_stdp.rs
@@ -19,9 +19,9 @@ fn test_stdp_ltp() {
fs::create_dir_all(&path).ok();

let filepath = path.join("stdp_ltp.csv");
let mut writer = csv::Writer::from_file(filepath.as_path()).unwrap();
let mut writer = csv::Writer::from_path(filepath.as_path()).unwrap();

writer.encode(("t", "d")).ok();
writer.serialize(("t", "d")).ok();

let mut tau = 0.0;
while tau < 40.0 {
@@ -38,7 +38,7 @@ fn test_stdp_ltp() {
now = now + tau;
let delta = synapse.post_recv(now);

writer.encode((-1.0 * tau, delta)).ok();
writer.serialize((-1.0 * tau, delta)).ok();
tau = tau + 0.1;
}
}
@@ -50,9 +50,9 @@ fn test_stdp_ltd() {
fs::create_dir_all(&path).ok();

let filepath = path.join("stdp_ltd.csv");
let mut writer = csv::Writer::from_file(&filepath.as_path()).unwrap();
let mut writer = csv::Writer::from_path(&filepath.as_path()).unwrap();

writer.encode(("t", "d")).ok();
writer.serialize(("t", "d")).ok();

let mut tau = 0.0;
while tau < 40.0 {
@@ -69,7 +69,7 @@ fn test_stdp_ltd() {
now = now + tau;
let delta = synapse.pre_recv(now);

writer.encode((tau, delta)).ok();
writer.serialize((tau, delta)).ok();
tau = tau + 0.1;
}
}