forked from tracel-ai/burn
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
- Loading branch information
Commit 847243d (1 parent: b1df39e), showing 45 changed files with 390 additions and 70 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,30 @@ | ||
use crate::{ | ||
backend::{autodiff::ADTensor, Backend}, | ||
graph::{ | ||
node::{ForwardNode, ForwardNodeRef, ForwardNodeState}, | ||
ops::{ForwardUnaryRecordedOps, UnaryOps}, | ||
}, | ||
}; | ||
use std::sync::Arc; | ||
|
||
pub fn unary_ops_wrapper<B, O, const D1: usize, const D2: usize>( | ||
input: ForwardNodeRef<B::TensorPrimitive<D1>>, | ||
output: B::TensorPrimitive<D2>, | ||
ops: O, | ||
) -> ADTensor<D2, B> | ||
where | ||
B: Backend, | ||
O: UnaryOps<B::TensorPrimitive<D1>, B::TensorPrimitive<D2>> + 'static, | ||
{ | ||
let shape = *B::shape(&output); | ||
let state = ForwardNodeState::new(output); | ||
|
||
let ops = Arc::new(ops); | ||
let ops = ForwardUnaryRecordedOps::new(input.clone(), ops); | ||
let ops = Arc::new(ops); | ||
|
||
let node = ForwardNode::from_unary(&input, state, ops); | ||
let node = Arc::new(node); | ||
|
||
ADTensor { node, shape } | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,49 @@ | ||
use super::unary_ops_wrapper; | ||
use crate::{ | ||
backend::{ | ||
autodiff::{ADBackendDecorator, ADTensor}, | ||
Backend, | ||
}, | ||
graph::ops::{UnaryOps, UnaryOpsNodeState}, | ||
ops::ModuleOps, | ||
}; | ||
|
||
#[derive(new, Debug)] | ||
struct EmbeddingBackward<B: Backend> { | ||
indexes: <B::IntegerBackend as Backend>::TensorPrimitive<2>, | ||
} | ||
|
||
impl<B: Backend> UnaryOps<B::TensorPrimitive<2>, B::TensorPrimitive<3>> for EmbeddingBackward<B> { | ||
fn partial( | ||
&self, | ||
state: &UnaryOpsNodeState<B::TensorPrimitive<2>, B::TensorPrimitive<3>>, | ||
) -> B::TensorPrimitive<2> { | ||
B::embedding_backward(&state.input.value, &state.output.grad(), &self.indexes) | ||
} | ||
} | ||
|
||
impl<B: Backend> ModuleOps<ADBackendDecorator<B>> for ADBackendDecorator<B> { | ||
fn embedding( | ||
weights: &<ADBackendDecorator<B> as Backend>::TensorPrimitive<2>, | ||
indexes: &<<ADBackendDecorator<B> as Backend>::IntegerBackend as Backend>::TensorPrimitive< | ||
2, | ||
>, | ||
) -> <ADBackendDecorator<B> as Backend>::TensorPrimitive<3> { | ||
let input = weights.node.clone(); | ||
let output = B::embedding(weights.tensor_ref(), indexes); | ||
let ops = EmbeddingBackward::<B>::new(indexes.clone()); | ||
|
||
unary_ops_wrapper(input, output, ops) | ||
} | ||
|
||
fn embedding_backward( | ||
weights: &<ADBackendDecorator<B> as Backend>::TensorPrimitive<2>, | ||
output: &<ADBackendDecorator<B> as Backend>::TensorPrimitive<3>, | ||
indexes: &<<ADBackendDecorator<B> as Backend>::IntegerBackend as Backend>::TensorPrimitive< | ||
2, | ||
>, | ||
) -> <ADBackendDecorator<B> as Backend>::TensorPrimitive<2> { | ||
let tensor = B::embedding_backward(weights.tensor_ref(), output.tensor_ref(), indexes); | ||
ADTensor::from_tensor(tensor) | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,6 @@ | ||
mod activation; | ||
mod backend; | ||
mod module_ops; | ||
mod ops; | ||
mod shape; | ||
mod tensor; | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,56 @@ | ||
use super::{NdArrayBackend, NdArrayTensor}; | ||
use crate::{ops::*, NdArrayElement, Shape}; | ||
|
||
impl<E: NdArrayElement> ModuleOps<NdArrayBackend<E>> for NdArrayBackend<E> { | ||
fn embedding( | ||
weights: &NdArrayTensor<E, 2>, | ||
indexes: &NdArrayTensor<i64, 2>, | ||
) -> NdArrayTensor<E, 3> { | ||
let [batch_size, seq_length] = indexes.shape.dims; | ||
let [_n_embedding, d_model] = weights.shape.dims; | ||
|
||
let mut tensors = Vec::with_capacity(batch_size * seq_length); | ||
|
||
for index in indexes | ||
.reshape(Shape::new([batch_size * seq_length])) | ||
.array | ||
.iter() | ||
{ | ||
let index = *index as usize; | ||
tensors.push(weights.index([index..index + 1, 0..d_model])); | ||
} | ||
let embedding = TensorOpsCat::cat(tensors.iter().collect(), 0); | ||
embedding.reshape(Shape::new([batch_size, seq_length, d_model])) | ||
} | ||
|
||
fn embedding_backward( | ||
weights: &NdArrayTensor<E, 2>, | ||
output: &NdArrayTensor<E, 3>, | ||
indexes: &NdArrayTensor<i64, 2>, | ||
) -> NdArrayTensor<E, 2> { | ||
let [batch_size, seq_length] = indexes.shape.dims; | ||
let [_n_embedding, d_model] = weights.shape.dims; | ||
|
||
let mut weights_grad = weights.zeros(); | ||
let output = output.reshape(Shape::new([batch_size * seq_length, d_model])); | ||
|
||
for (index_output, index) in indexes | ||
.reshape(Shape::new([batch_size * seq_length])) | ||
.array | ||
.iter() | ||
.enumerate() | ||
{ | ||
let index = *index as usize; | ||
|
||
let weights_grad_current = weights_grad.index([index..index + 1, 0..d_model]); | ||
let output_grad = output.index([index_output..index_output + 1, 0..d_model]); | ||
|
||
weights_grad = weights_grad.index_assign( | ||
[index..index + 1, 0..d_model], | ||
&output_grad.add(&weights_grad_current), | ||
); | ||
} | ||
|
||
weights_grad | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -10,7 +10,7 @@ where | |
Self { | ||
tensor, | ||
shape: self.shape, | ||
kind: self.kind.clone(), | ||
kind: self.kind, | ||
} | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -1,5 +1,6 @@ | ||
mod activation; | ||
mod backend; | ||
mod module_ops; | ||
mod ops; | ||
mod tensor; | ||
mod tensor_ops; | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,38 @@ | ||
use super::{TchBackend, TchTensor}; | ||
use crate::{ops::ModuleOps, Shape, TchElement}; | ||
|
||
impl<E: TchElement> ModuleOps<TchBackend<E>> for TchBackend<E> { | ||
fn embedding(weights: &TchTensor<E, 2>, indexes: &TchTensor<i64, 2>) -> TchTensor<E, 3> { | ||
let tensor = tch::Tensor::embedding(&weights.tensor, &indexes.tensor, -1, false, false); | ||
let shape = Shape::from(tensor.size()); | ||
|
||
TchTensor { | ||
kind: weights.kind, | ||
tensor, | ||
shape, | ||
} | ||
} | ||
|
||
fn embedding_backward( | ||
weights: &TchTensor<E, 2>, | ||
output: &TchTensor<E, 3>, | ||
indexes: &TchTensor<i64, 2>, | ||
) -> TchTensor<E, 2> { | ||
let [n_embedding, _d_model] = weights.shape.dims; | ||
let tensor = tch::Tensor::embedding_backward( | ||
&output.tensor, | ||
&indexes.tensor, | ||
n_embedding as i64, | ||
-1, | ||
false, | ||
false, | ||
); | ||
let shape = Shape::from(tensor.size()); | ||
|
||
TchTensor { | ||
kind: weights.kind, | ||
tensor, | ||
shape, | ||
} | ||
} | ||
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Oops, something went wrong.