Skip to content

Commit

Permalink
Add perceptron
Browse files Browse the repository at this point in the history
  • Loading branch information
opixelum committed Mar 6, 2024
1 parent 51ad5f1 commit 156b469
Show file tree
Hide file tree
Showing 2 changed files with 25 additions and 56 deletions.
52 changes: 11 additions & 41 deletions rust/src/ai/layer.rs
Original file line number Diff line number Diff line change
@@ -1,49 +1,19 @@
use crate::ai::tensor::Tensor;
use std::iter::Sum;
use std::ops::{Add, Mul};
use crate::ai::activation::*;
use crate::ai::neuron::Neuron;

pub trait Layer<T> {
fn forward(&self, input: &Tensor<T>) -> Tensor<T>;
// TODO: Add backpropagation
// A fully-connected layer: a collection of neurons whose raw outputs are
// all passed through the same activation function (see `Layer::forward`).
pub struct Layer {
// Neurons making up the layer; each is evaluated independently in `forward`.
pub neurons: Vec<Neuron>,
// Activation applied to every neuron's output. `Activation` is declared in
// `crate::ai::activation` — presumably a fn-pointer/alias taking and
// returning f64, since it is invoked as `(self.activation)(...)` — TODO confirm.
pub activation: Activation,
}

pub struct Flatten;
impl Layer {
pub fn forward(&self) -> Vec<f64> {
let mut outputs = Vec::new();

impl<T: Default + Clone> Layer<T> for Flatten {
fn forward(&self, input: &Tensor<T>) -> Tensor<T> {
Tensor {
data: input.data.clone(),
shape: vec![input.shape.iter().product()],
for neuron in self.neurons.iter() {
outputs.push((self.activation)(neuron.forward()))
}
}
}

// Parameters of a dense (fully-connected) layer: a weight tensor and a bias
// tensor. Fields are private; construct via `Dense::new`.
pub struct Dense<T> {
// Weight tensor; `Dense::new` shapes it as [output_size, input_size].
weights: Tensor<T>,
// Bias tensor; `Dense::new` shapes it as [output_size].
biases: Tensor<T>,
}

impl<T> Dense<T>
where
    // Bounds needed by the (not yet implemented) forward pass: element-wise
    // multiply/add plus summation over products.
    T: Default + Clone + Add<Output = T> + Mul<Output = T> + Sum,
{
    /// Builds a dense layer mapping `input_size` inputs to `output_size` outputs.
    ///
    /// Weights are shaped `[output_size, input_size]` and biases `[output_size]`.
    /// NOTE(review): `Tensor::new` presumably fills with `T::default()`, so both
    /// tensors start at default values, not random ones — confirm against
    /// `crate::ai::tensor` before relying on any initialization scheme.
    pub fn new(input_size: usize, output_size: usize) -> Dense<T> {
        Dense {
            weights: Tensor::new(vec![output_size, input_size]),
            biases: Tensor::new(vec![output_size]),
        }
    }
}

/*
impl<T> Layer<T> for Dense<T>
where
T: Default + Clone + Add<Output = T> + Mul<Output = T> + Sum, // other necessary traits
{
fn forward(&self, input: &Tensor<T>) -> Tensor<T> {
// Implement the forward pass for Dense layer
// Typically involves matrix multiplication with input and adding biases
// Return the resulting tensor
outputs
}
}
*/
29 changes: 14 additions & 15 deletions rust/tests/ai/layer.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,19 @@
use opixelib::ai::activation::*;
use opixelib::ai::layer::*;
use opixelib::ai::tensor::Tensor;
use opixelib::ai::neuron::Neuron;

#[test]
fn test_flatten_forward() {
let mut input = Tensor::<u8>::new(vec![2, 2, 2]);
input
.set(&[0, 0, 0], 55)
.expect("Failed to set flatten input");
input
.set(&[0, 1, 0], 55)
.expect("Failed to set flatten input");
input
.set(&[1, 1, 1], 55)
.expect("Failed to set flatten input");
fn test_layer_forward() {
let neuron: Neuron = Neuron {
inputs: vec![1.0, 2.0, 3.0],
weights: vec![4.0, 5.0, 6.0],
bias: 10.0,
};

let output = Flatten.forward(&input);
assert_eq!(output.data, vec![55, 0, 55, 0, 0, 0, 0, 55]);
assert_eq!(output.shape, vec![8]);
let perceptron = Layer {
neurons: vec![neuron],
activation: heavyside,
};

assert_eq!(perceptron.forward(), vec![1.0])
}

0 comments on commit 156b469

Please sign in to comment.