Layer parameters methods return impl Iterator
charles-r-earp committed Feb 29, 2024
1 parent af2a756 commit f94a76c
Showing 7 changed files with 150 additions and 153 deletions.
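At a glance, the commit replaces the collected `ParameterVec`/`ParameterMutVec` return types on the `Layer` trait with lazily chained `impl Iterator` returns, renaming `parameters_mut` to `make_parameters_mut`, which is also why the `smallvec` dependency can be dropped from Cargo.toml below. A minimal before/after sketch of the signatures, reconstructed from the generated code in the autograph_derive diff; paths and bounds are abbreviated here:

```rust
// Before: each call collects parameters into a smallvec-backed vector.
trait Layer {
    fn parameters(&self) -> ParameterVec;
    fn parameters_mut(&mut self) -> anyhow::Result<ParameterMutVec>;
    // set_training, cast_mut, to_device_mut, into_device elided
}

// After: parameters are yielded lazily; no intermediate collection.
// (Return-position impl Trait in traits is stable since Rust 1.75,
// which is presumably what makes these signatures possible.)
trait Layer {
    fn parameters(&self) -> impl Iterator<Item = ParameterD> + '_;
    fn make_parameters_mut(
        &mut self,
    ) -> anyhow::Result<impl Iterator<Item = ParameterViewMutD> + '_>;
    // set_training, cast_mut, to_device_mut, into_device elided
}
```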
3 changes: 1 addition & 2 deletions Cargo.toml
@@ -70,7 +70,6 @@ parking_lot = { workspace = true, optional = true }
rayon.workspace = true
once_cell = { version = "1.17.1", optional = true, features = ["std"] }
num-traits = "0.2.15"
smallvec = { version = "1.11.1", optional = true }
matrixmultiply_mt = { version = "0.2.1", optional = true }
matrixmultiply = { version = "0.3.8", optional = true }
wide = "0.7.13"
@@ -91,7 +90,7 @@ dataset = ["dep:rand"]
iris = []
mnist = ["dataset", "dep:dirs", "dep:flate2", "dep:downloader", "dep:byteorder", "dep:http"]
learn = []
neural-network = ["learn", "dep:autograph_derive", "dep:crossbeam-channel", "dep:parking_lot", "dep:rand", "dep:once_cell", "dep:smallvec"]
neural-network = ["learn", "dep:autograph_derive", "dep:crossbeam-channel", "dep:parking_lot", "dep:rand", "dep:once_cell"]


[package.metadata.krnlc]
2 changes: 1 addition & 1 deletion README.md
@@ -96,7 +96,7 @@ model.set_training(true)?;
let y = model.forward(x)?;
let loss = y.cross_entropy_loss(t)?;
loss.backward()?;
for parameter in model.parameters_mut()? {
for parameter in model.make_parameters_mut()? {
optimizer.update(learning_rate, parameter)?;
}
```
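For callers, the only textual change in the snippet above is the method name, but the return values differ in shape: `parameters()` is now an iterator rather than a vector, and `make_parameters_mut()` is fallible and yields mutable views. A short usage sketch reusing the names from the snippets in this commit (`model`, `optimizer`, `learning_rate`); the parameter-count closure mirrors the MNIST example further down:

```rust
// Counting parameter elements: `.iter()` is no longer needed because
// `parameters()` already returns an iterator.
let parameter_count: usize = model
    .parameters()
    .map(|p| p.raw_dim().size())
    .sum();

// Collecting is still possible when a concrete Vec is genuinely needed.
let all_parameters: Vec<_> = model.parameters().collect();

// Mutable parameter views are produced fallibly, hence the `?`.
for parameter in model.make_parameters_mut()? {
    optimizer.update(learning_rate, parameter)?;
}
```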
99 changes: 44 additions & 55 deletions autograph_derive/src/lib.rs
@@ -1,35 +1,8 @@
/*!
# Usage
You can derive Layer and Forward for structs and enums:
```text
use autograph::{
anyhow::Result,
learn::neural_network::{
autograd::{Variable4, Variable2},
layer::{Layer, Forward, Flatten, Conv2, Relu, MaxPool2, Dense},
},
};
// Layer and Forward can be derived for structs composed of layers.
#[derive(Layer, Forward)]
#[autograph(forward(Variable4, Output=Variable2))]
struct Network {
conv: Conv2<Relu>,
flatten: Flatten,
dense: Dense,
}
#![forbid(unsafe_code)]

// Can also be applied to enums.
#[derive(Layer, Forward)]
#[autograph(forward(Variable4, Output=Variable4))]
enum Dynamic {
Conv(Conv2),
Pool(MaxPool2),
}
```
/*!
Derive macros for [**autograph**](https://docs.rs/autograph).
*/
// TOOD: move docs to autograph::neural_network::layer
// TODO: remove `#[layer]` attribute.

use derive_syn_parse::Parse;
use proc_macro::TokenStream;
Expand Down Expand Up @@ -170,44 +143,64 @@ impl Layers {
}
}
}
fn collect(&self, method: Ident) -> TokenStream2 {
fn iter(&self, method: Ident) -> TokenStream2 {
match self {
Self::Struct(layers) => {
quote! {
::std::iter::empty()
#(.chain(self.#layers.#method()))*
.collect()
}
}
Self::Enum(layers) => {
quote! {
match self {
#(
Self::#layers(layer) => layer.#method(),
)*
}
::std::iter::empty()
#(
.chain((if let Self::#layers(layer) = self {
Some(layer.#method())
} else {
None
}).into_iter().flatten())
)*
}
}
}
}
fn try_collect(&self, method: Ident) -> TokenStream2 {
fn try_iter_mut(&self, method: Ident) -> TokenStream2 {
match self {
Self::Struct(layers) => {
quote! {
Ok(
::std::iter::empty()
#(.chain(self.#layers.#method()?))*
.collect()
)
}
}
Self::Enum(layers) => {
let some_layer = quote! { Some(layer) };
let none = quote! { None };
let match_arms = layers.iter().enumerate().map(|(i, layer)| {
let fields =
(0..layers.len()).map(|u| if i == u { &some_layer } else { &none });
quote! {
Self::#layer(layer) => (#(#fields),*)
}
});
let iters = (0 .. layers.len()).map(|u| {
let index = Index::from(u);
quote! {
layers.#index.map(|layer| layer.#method()).transpose()?.into_iter().flatten()
}
});
quote! {
match self {
let layers = match self {
#(#match_arms),*
};
Ok(
::std::iter::empty()
#(
Self::#layers(layer) => layer.#method(),
.chain(#iters)
)*
}
)
}
}
}
}
@@ -227,7 +220,7 @@ impl Layers {
quote! {
match self {
#(
Self::#layers(layer) => Ok(Self::#layers(layer.#method()?)),
Self::#layers(layer) => Ok(Self::#layers(layer.#method(#arg)?)),
)*
}
}
}
@@ -282,23 +275,23 @@ fn layer_impl(input: TokenStream2) -> Result<TokenStream2> {
let autograph = autograph_crate(&input.attrs)?;
let ident = &input.ident;
let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();
let parameters = layers.iter(format_ident!("parameters"));
let make_parameters_mut = layers.try_iter_mut(format_ident!("make_parameters_mut"));
let set_training = layers.try_for_each(format_ident!("set_training"), quote! { training });
let parameters = layers.collect(format_ident!("parameters"));
let parameters_mut = layers.try_collect(format_ident!("parameters_mut"));
let cast_mut = layers.try_for_each(format_ident!("cast_mut"), quote!(scalar_type));
let to_device_mut = layers.try_for_each(format_ident!("to_device_mut"), quote!(device.clone()));
let into_device = layers.try_map(format_ident!("into_device"), quote! { device.clone() });
Ok(quote! {
#[automatically_derived]
impl #impl_generics Layer for #ident #ty_generics #where_clause {
fn set_training(&mut self, training: bool) -> #autograph::anyhow::Result<()> {
#set_training
}
fn parameters(&self) -> #autograph::learn::neural_network::layer::ParameterVec {
fn parameters(&self) -> impl ::std::iter::Iterator<Item=#autograph::learn::neural_network::autograd::ParameterD> + '_ {
#parameters
}
fn parameters_mut(&mut self) -> #autograph::anyhow::Result<#autograph::learn::neural_network::layer::ParameterMutVec> {
#parameters_mut
fn make_parameters_mut(&mut self) -> #autograph::anyhow::Result<impl ::std::iter::Iterator<Item= #autograph::learn::neural_network::autograd::ParameterViewMutD> + '_> {
#make_parameters_mut
}
fn set_training(&mut self, training: bool) -> #autograph::anyhow::Result<()> {
#set_training
}
fn cast_mut(&mut self, scalar_type: #autograph::krnl::scalar::ScalarType) -> #autograph::anyhow::Result<()> {
#cast_mut
Expand All @@ -315,8 +308,6 @@ fn layer_impl(input: TokenStream2) -> Result<TokenStream2> {
}

/// Derive for Layer.
///
/// See [`autograph_derive`](crate).
#[proc_macro_derive(Layer, attributes(autograph, layer))]
pub fn layer(input: TokenStream) -> TokenStream {
match layer_impl(input.into()) {
Expand Down Expand Up @@ -370,8 +361,6 @@ fn forward_impl(input: TokenStream2) -> Result<TokenStream2> {
}

/// Derive for Forward.
///
/// See [`autograph_derive`](crate).
#[proc_macro_derive(Forward, attributes(autograph, layer))]
pub fn forward(input: TokenStream) -> TokenStream {
match forward_impl(input.into()) {
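To make the generator methods above concrete: for a struct the derive chains each field's iterator, and for an enum it first matches into a tuple of `Option`s so that the `?` in `make_parameters_mut` can run before any iterator is returned. Below is a hand-expanded sketch, not compiler output, for the `Network` and `Dynamic` types from the doc example this commit removes, assuming their fields and variants implement `Layer` themselves:

```rust
// #[derive(Layer)] struct Network { conv: Conv2<Relu>, flatten: Flatten, dense: Dense }
fn parameters(&self) -> impl Iterator<Item = ParameterD> + '_ {
    ::std::iter::empty()
        .chain(self.conv.parameters())
        .chain(self.flatten.parameters())
        .chain(self.dense.parameters())
}

// #[derive(Layer)] enum Dynamic { Conv(Conv2), Pool(MaxPool2) }
fn parameters(&self) -> impl Iterator<Item = ParameterD> + '_ {
    ::std::iter::empty()
        .chain(
            (if let Self::Conv(layer) = self { Some(layer.parameters()) } else { None })
                .into_iter()
                .flatten(),
        )
        .chain(
            (if let Self::Pool(layer) = self { Some(layer.parameters()) } else { None })
                .into_iter()
                .flatten(),
        )
}

// Enum case of make_parameters_mut: the match produces (Option, Option),
// `transpose()?` surfaces any error eagerly, and the remaining Options
// are flattened into the chained iterator.
fn make_parameters_mut(
    &mut self,
) -> anyhow::Result<impl Iterator<Item = ParameterViewMutD> + '_> {
    let layers = match self {
        Self::Conv(layer) => (Some(layer), None),
        Self::Pool(layer) => (None, Some(layer)),
    };
    Ok(::std::iter::empty()
        .chain(
            layers.0
                .map(|layer| layer.make_parameters_mut())
                .transpose()?
                .into_iter()
                .flatten(),
        )
        .chain(
            layers.1
                .map(|layer| layer.make_parameters_mut())
                .transpose()?
                .into_iter()
                .flatten(),
        ))
}
```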
2 changes: 1 addition & 1 deletion benches/neural-network-benches/src/autograph_backend.rs
@@ -80,7 +80,7 @@ impl LeNet5Classifier {
loss.backward()?;
let optimizer = self.optimizer.as_ref().unwrap();
let learning_rate = 0.01;
for parameter in self.model.parameters_mut()? {
for parameter in self.model.make_parameters_mut()? {
optimizer.update(learning_rate, parameter)?;
}
self.model.set_training(false)?;
3 changes: 1 addition & 2 deletions examples/neural-network-mnist/src/main.rs
@@ -163,7 +163,6 @@ fn main() -> Result<()> {
println!("model: {model:#?}");
let parameter_count = model
.parameters()
.iter()
.map(|x| x.raw_dim().size())
.sum::<usize>();
println!(
Expand Down Expand Up @@ -303,7 +302,7 @@ fn train<I: Iterator<Item = Result<(Tensor4<u8>, Tensor1<u8>)>>>(
.into_array()?
.into_scalar();
loss.backward()?;
for parameter in model.parameters_mut()? {
for parameter in model.make_parameters_mut()? {
optimizer.update(learning_rate, parameter)?;
}
model.set_training(false)?;
2 changes: 1 addition & 1 deletion src/learn/neural_network.rs
@@ -113,7 +113,7 @@ model.set_training(true)?;
let y = model.forward(x)?;
let loss = y.cross_entropy_loss(t)?;
loss.backward()?;
for parameter in model.parameters_mut()? {
for parameter in model.make_parameters_mut()? {
optimizer.update(learning_rate, parameter)?;
}
# Ok(())