Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: Allow partial execution of network #51

Merged
merged 18 commits into from
Feb 2, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -14,5 +14,4 @@ target/

*.test.bin
*.test.st
_test/run.ts
_test/Student_Performance.csv
_test/
2 changes: 1 addition & 1 deletion crates/core-gpu/src/ffi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ pub extern "C" fn ffi_backend_predict(

RESOURCES.with(|cell| {
let mut backend = cell.backend.borrow_mut();
let res = backend[id].predict(inputs);
let res = backend[id].predict(inputs, options.layers);
outputs.copy_from_slice(res.as_slice().unwrap());
});
}
Expand Down
29 changes: 22 additions & 7 deletions crates/core-gpu/src/gpu/backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -208,9 +208,24 @@ impl Backend {
}
}

pub fn forward_propagate(&mut self, mut inputs: ArrayD<f32>, training: bool) -> ArrayD<f32> {
for layer in &mut self.layers {
inputs = layer.forward_propagate(inputs, training);
pub fn forward_propagate(
&mut self,
mut inputs: ArrayD<f32>,
training: bool,
layers: Option<Vec<usize>>,
) -> ArrayD<f32> {
match layers {
Some(layer_indices) => {
for layer_index in layer_indices {
let layer = self.layers.get_mut(layer_index).expect(&format!("Layer #{} does not exist.", layer_index));
inputs = layer.forward_propagate(inputs, training);
}
}
None => {
for layer in &mut self.layers {
inputs = layer.forward_propagate(inputs, training);
}
}
}
inputs
}
Expand All @@ -232,7 +247,7 @@ impl Backend {
while epoch < epochs {
let mut total = 0.0;
for (i, dataset) in datasets.iter().enumerate() {
let outputs = self.forward_propagate(dataset.inputs.clone(), true);
let outputs = self.forward_propagate(dataset.inputs.clone(), true, None);
self.backward_propagate(outputs.view(), dataset.outputs.view());
self.optimizer
.update_grads(&mut self.layers, &self.scheduler, rate, epoch);
Expand All @@ -249,11 +264,11 @@ impl Backend {
}
}

pub fn predict(&mut self, data: ArrayD<f32>) -> ArrayD<f32> {
pub fn predict(&mut self, data: ArrayD<f32>, layers: Option<Vec<usize>>) -> ArrayD<f32> {
for layer in &mut self.layers {
layer.reset(1)
layer.reset(1);
}
self.forward_propagate(data, false)
self.forward_propagate(data, false, layers)
}

pub fn save(&self) -> Vec<u8> {
Expand Down
1 change: 1 addition & 0 deletions crates/core-gpu/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -181,6 +181,7 @@ pub struct TrainOptions {
/// Options controlling a single prediction/inference call.
pub struct PredictOptions {
    /// Shape of the input tensor supplied by the caller.
    pub input_shape: Vec<usize>,
    /// Shape of the output buffer the prediction fills.
    pub output_shape: Vec<usize>,
    /// Optional list of layer indices to execute (partial network run);
    /// `None` runs every layer.
    pub layers: Option<Vec<usize>>,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
Expand Down
29 changes: 22 additions & 7 deletions crates/core/src/cpu/backend.rs
Original file line number Diff line number Diff line change
Expand Up @@ -95,9 +95,24 @@ impl Backend {
}
}

pub fn forward_propagate(&mut self, mut inputs: ArrayD<f32>, training: bool) -> ArrayD<f32> {
for layer in &mut self.layers {
inputs = layer.forward_propagate(inputs, training);
pub fn forward_propagate(
&mut self,
mut inputs: ArrayD<f32>,
training: bool,
layers: Option<Vec<usize>>,
) -> ArrayD<f32> {
match layers {
Some(layer_indices) => {
for layer_index in layer_indices {
let layer = self.layers.get_mut(layer_index).expect(&format!("Layer #{} does not exist.", layer_index));
inputs = layer.forward_propagate(inputs, training);
}
}
None => {
for layer in &mut self.layers {
inputs = layer.forward_propagate(inputs, training);
}
}
}
inputs
}
Expand All @@ -119,7 +134,7 @@ impl Backend {
while epoch < epochs {
let mut total = 0.0;
for (i, dataset) in datasets.iter().enumerate() {
let outputs = self.forward_propagate(dataset.inputs.clone(), true);
let outputs = self.forward_propagate(dataset.inputs.clone(), true, None);
self.backward_propagate(outputs.view(), dataset.outputs.view());
self.optimizer
.update_grads(&mut self.layers, &self.scheduler, rate, epoch);
Expand All @@ -136,11 +151,11 @@ impl Backend {
}
}

pub fn predict(&mut self, data: ArrayD<f32>) -> ArrayD<f32> {
pub fn predict(&mut self, data: ArrayD<f32>, layers: Option<Vec<usize>>) -> ArrayD<f32> {
for layer in &mut self.layers {
layer.reset(1)
layer.reset(1);
}
self.forward_propagate(data, false)
self.forward_propagate(data, false, layers)
}

pub fn save(&self) -> Vec<u8> {
Expand Down
2 changes: 1 addition & 1 deletion crates/core/src/cpu/cost.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ impl CPUCost {

fn mse<'a>(y_hat: ArrayViewD<'a, f32>, y: ArrayViewD<'a, f32>) -> f32 {
let sub = y.sub(&y_hat);
return sub.clone().mul(sub).sum();
return sub.clone().mul(sub).sum() / y.len() as f32;
}

fn mse_prime<'a>(y_hat: ArrayViewD<'a, f32>, y: ArrayViewD<'a, f32>) -> ArrayD<f32> {
Expand Down
9 changes: 7 additions & 2 deletions crates/core/src/ffi.rs
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,12 @@ fn log(string: String) {
pub extern "C" fn ffi_backend_create(ptr: *const u8, len: usize, alloc: AllocBufferFn) -> usize {
let config = decode_json(ptr, len);
let net_backend = Backend::new(config, Logger { log }, None);
let buf: Vec<u8> = net_backend.size.iter().map(|x| *x as u8).collect();
let buf: Vec<u8> = net_backend
.size
.iter()
.map(|x| *x as u32)
.flat_map(|x| x.to_le_bytes().to_vec())
.collect();
let size_ptr = alloc(buf.len());
let output_shape = unsafe { from_raw_parts_mut(size_ptr, buf.len()) };
output_shape.copy_from_slice(buf.as_slice());
Expand Down Expand Up @@ -70,7 +75,7 @@ pub extern "C" fn ffi_backend_predict(

RESOURCES.with(|cell| {
let mut backend = cell.backend.borrow_mut();
let res = backend[id].predict(inputs);
let res = backend[id].predict(inputs, options.layers);
outputs.copy_from_slice(res.as_slice().unwrap());
});
}
Expand Down
1 change: 1 addition & 0 deletions crates/core/src/types.rs
Original file line number Diff line number Diff line change
Expand Up @@ -181,4 +181,5 @@ pub struct TrainOptions {
/// Options controlling a single prediction/inference call.
pub struct PredictOptions {
    /// Shape of the input tensor supplied by the caller.
    pub input_shape: Vec<usize>,
    /// Shape of the output buffer the prediction fills.
    pub output_shape: Vec<usize>,
    /// Optional list of layer indices to execute (partial network run);
    /// `None` runs every layer.
    pub layers: Option<Vec<usize>>,
}
2 changes: 1 addition & 1 deletion crates/core/src/wasm.rs
Original file line number Diff line number Diff line change
Expand Up @@ -59,11 +59,11 @@
let options: PredictOptions = serde_json::from_str(&options).unwrap();
let inputs = ArrayD::from_shape_vec(options.input_shape, buffer.to_vec()).unwrap();

let mut res = ArrayD::zeros(options.output_shape);

Check warning on line 62 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build macos-latest

variable does not need to be mutable

Check warning on line 62 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build ubuntu-latest

variable does not need to be mutable

Check warning on line 62 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build windows-latest

variable does not need to be mutable

RESOURCES.with(|cell| {
let mut backend = cell.backend.borrow_mut();
res = backend[id].predict(inputs);
let res = backend[id].predict(inputs, options.layers);

Check warning on line 66 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build macos-latest

unused variable: `res`

Check warning on line 66 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build ubuntu-latest

unused variable: `res`

Check warning on line 66 in crates/core/src/wasm.rs

View workflow job for this annotation

GitHub Actions / Build windows-latest

unused variable: `res`
});
Float32Array::from(res.as_slice().unwrap())
}
Expand Down
1 change: 1 addition & 0 deletions deno.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
"example:multiclass": "deno run -A --unstable ./examples/classification/iris.ts",
"example:text": "deno run -A --unstable ./examples/classification/spam.ts",
"example:filters": "deno run -A --unstable examples/filters/conv.ts ",
"example:autoencoder": "deno run -A --unstable examples/autoencoders/test.ts ",
"example:train": "deno run -A --unstable examples/model/train.ts ",
"example:run": "deno run -A --unstable examples/model/run.ts ",
"example:mnist-download": "deno run -A --unstable examples/mnist/download.ts ",
Expand Down
Loading
Loading