diff --git a/rust/src/ai/layer.rs b/rust/src/ai/layer.rs
index 613f7aa..5326973 100644
--- a/rust/src/ai/layer.rs
+++ b/rust/src/ai/layer.rs
@@ -1,49 +1,19 @@
-use crate::ai::tensor::Tensor;
-use std::iter::Sum;
-use std::ops::{Add, Mul};
+use crate::ai::activation::*;
+use crate::ai::neuron::Neuron;
 
-pub trait Layer<T> {
-    fn forward(&self, input: &Tensor<T>) -> Tensor<T>;
-    // TODO: Add backpropagation
+pub struct Layer {
+    pub neurons: Vec<Neuron>,
+    pub activation: Activation,
 }
 
-pub struct Flatten;
+impl Layer {
+    pub fn forward(&self) -> Vec<f64> {
+        let mut outputs = Vec::new();
 
-impl<T: Clone> Layer<T> for Flatten {
-    fn forward(&self, input: &Tensor<T>) -> Tensor<T> {
-        Tensor {
-            data: input.data.clone(),
-            shape: vec![input.shape.iter().product()],
+        for neuron in self.neurons.iter() {
+            outputs.push((self.activation)(neuron.forward()))
         }
-    }
-}
-
-pub struct Dense<T> {
-    weights: Tensor<T>,
-    biases: Tensor<T>,
-}
-
-impl<T> Dense<T>
-where
-    T: Default + Clone + Add<Output = T> + Mul<Output = T> + Sum, // other necessary traits
-{
-    pub fn new(input_size: usize, output_size: usize) -> Dense<T> {
-        let weights = Tensor::new(vec![output_size, input_size]); // Initialize with random values
-        let biases = Tensor::new(vec![output_size]); // Initialize with zeros or small constants
-
-        Dense { weights, biases }
-    }
-}
-/*
-impl<T> Layer<T> for Dense<T>
-where
-    T: Default + Clone + Add<Output = T> + Mul<Output = T> + Sum, // other necessary traits
-{
-    fn forward(&self, input: &Tensor<T>) -> Tensor<T> {
-        // Implement the forward pass for Dense layer
-        // Typically involves matrix multiplication with input and adding biases
-        // Return the resulting tensor
+        outputs
     }
 }
-*/
diff --git a/rust/tests/ai/layer.rs b/rust/tests/ai/layer.rs
index 3a8c8a0..66cf0a8 100644
--- a/rust/tests/ai/layer.rs
+++ b/rust/tests/ai/layer.rs
@@ -1,20 +1,19 @@
+use opixelib::ai::activation::*;
 use opixelib::ai::layer::*;
-use opixelib::ai::tensor::Tensor;
+use opixelib::ai::neuron::Neuron;
 
 #[test]
-fn test_flatten_forward() {
-    let mut input = Tensor::<u8>::new(vec![2, 2, 2]);
-    input
-        .set(&[0, 0, 0], 55)
-        .expect("Failed to set flatten input");
-    input
-        .set(&[0, 1, 0], 55)
-        .expect("Failed to set flatten input");
-    input
-        .set(&[1, 1, 1], 55)
-        .expect("Failed to set flatten input");
+fn test_layer_forward() {
+    let neuron: Neuron = Neuron {
+        inputs: vec![1.0, 2.0, 3.0],
+        weights: vec![4.0, 5.0, 6.0],
+        bias: 10.0,
+    };
 
-    let output = Flatten.forward(&input);
-    assert_eq!(output.data, vec![55, 0, 55, 0, 0, 0, 0, 55]);
-    assert_eq!(output.shape, vec![8]);
+    let perceptron = Layer {
+        neurons: vec![neuron],
+        activation: heavyside,
+    };
+
+    assert_eq!(perceptron.forward(), vec![1.0])
 }