
Commit

Rename GELU to Gelu (tracel-ai#1311)
* Rename GELU to Gelu

This is to follow the Rust naming convention.
antimora authored Feb 15, 2024
1 parent d1273d4 commit dfb739c
Showing 9 changed files with 26 additions and 26 deletions.
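For downstream crates the change is purely a rename; the module's behavior is untouched. A minimal, hypothetical sketch of what the update looks like in user code (the `FeedForward` struct and `with_parts` helper are illustrative; only the `nn::Gelu` path and `nn::Gelu::new()` come from this commit):

```rust
use burn::nn;
use burn::tensor::backend::Backend;

/// Hypothetical downstream block holding the renamed activation module.
pub struct FeedForward<B: Backend> {
    linear: nn::Linear<B>,
    dropout: nn::Dropout,
    // Before this commit: `activation: nn::GELU,`
    activation: nn::Gelu,
}

impl<B: Backend> FeedForward<B> {
    /// Illustrative constructor; callers supply the already-initialized parts.
    pub fn with_parts(linear: nn::Linear<B>, dropout: nn::Dropout) -> Self {
        Self {
            linear,
            dropout,
            // Before this commit: `nn::GELU::new()`
            activation: nn::Gelu::new(),
        }
    }
}
```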
README.md (2 changes: 1 addition & 1 deletion)

@@ -508,7 +508,7 @@ pub struct PositionWiseFeedForward<B: Backend> {
     linear_inner: nn::Linear<B>,
     linear_outer: nn::Linear<B>,
     dropout: nn::Dropout,
-    gelu: nn::GELU,
+    gelu: nn::Gelu,
 }
 
 impl<B: Backend> PositionWiseFeedForward<B> {
burn-book/src/building-blocks/module.md (4 changes: 2 additions & 2 deletions)

@@ -14,7 +14,7 @@ pub struct PositionWiseFeedForward<B: Backend> {
     linear_inner: Linear<B>,
     linear_outer: Linear<B>,
     dropout: Dropout,
-    gelu: GELU,
+    gelu: Gelu,
 }
 
 impl<B: Backend> PositionWiseFeedForward<B> {
@@ -113,7 +113,7 @@ Burn comes with built-in modules that you can use to build your own modules.
 | `LayerNorm` | `nn.LayerNorm` |
 | `GroupNorm` | `nn.GroupNorm` |
 | `Dropout` | `nn.Dropout` |
-| `GELU` | `nn.GELU` |
+| `Gelu` | `nn.Gelu` |
 | `Linear` | `nn.Linear` |
 | `Embedding` | `nn.Embedding` |
 | `Relu` | `nn.ReLU` |
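Stateless modules such as `Gelu` are constructed with `new()`, while parameterized modules go through their config types. A short sketch under that assumption, using only constructors that appear elsewhere in this commit (the dimensions and dropout rate are placeholders):

```rust
use burn::nn::{Dropout, DropoutConfig, Gelu, Linear, LinearConfig};
use burn::tensor::backend::Backend;

// Sketch: building a few of the built-in modules listed above.
// `LinearConfig::init(device)` and `DropoutConfig::init()` mirror the calls
// in the pwff.rs and mha.rs hunks below; the numbers are arbitrary.
fn build_parts<B: Backend>(device: &B::Device) -> (Linear<B>, Dropout, Gelu) {
    let linear: Linear<B> = LinearConfig::new(512, 2048).init(device);
    let dropout = DropoutConfig::new(0.1).init();
    let gelu = Gelu::new();
    (linear, dropout, gelu)
}
```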
burn-core/src/nn/attention/mha.rs (6 changes: 3 additions & 3 deletions)

@@ -55,7 +55,7 @@ pub struct MultiHeadAttention<B: Backend> {
     value: nn::Linear<B>,
     output: nn::Linear<B>,
     dropout: nn::Dropout,
-    activation: nn::GELU,
+    activation: nn::Gelu,
     n_heads: usize,
     d_k: usize,
     min_float: f64,
@@ -87,7 +87,7 @@ impl MultiHeadAttentionConfig {
             value: linear(self),
             output: linear(self),
             dropout: nn::DropoutConfig::new(self.dropout).init(),
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
             n_heads: self.n_heads,
             d_k: self.d_model / self.n_heads,
             min_float: self.min_float,
@@ -111,7 +111,7 @@ impl MultiHeadAttentionConfig {
             value: linear(self, record.value),
             output: linear(self, record.output),
             dropout: nn::DropoutConfig::new(self.dropout).init(),
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
             n_heads: self.n_heads,
             d_k: self.d_model / self.n_heads,
             min_float: self.min_float,
burn-core/src/nn/gelu.rs (4 changes: 2 additions & 2 deletions)

@@ -6,9 +6,9 @@ use crate::tensor::Tensor;
 
 /// Applies the Gaussian Error Linear Units function element-wise.
 #[derive(Module, Clone, Debug, Default)]
-pub struct GELU {}
+pub struct Gelu {}
 
-impl GELU {
+impl Gelu {
     /// Create the module.
     pub fn new() -> Self {
         Self {}
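The renamed module keeps its zero-argument constructor and an element-wise forward pass. A small usage sketch under that assumption (the `apply_gelu` helper is illustrative; the generic `forward` signature is assumed to match Burn's other activation modules):

```rust
use burn::nn::Gelu;
use burn::tensor::{backend::Backend, Tensor};

// Illustrative helper: applies the renamed activation element-wise to a
// tensor of any rank. `Gelu::new()` comes from the hunk above; the
// `forward` signature is assumed.
fn apply_gelu<B: Backend, const D: usize>(input: Tensor<B, D>) -> Tensor<B, D> {
    Gelu::new().forward(input)
}
```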
burn-core/src/nn/transformer/pwff.rs (8 changes: 4 additions & 4 deletions)

@@ -4,7 +4,7 @@ use crate::nn::Initializer;
 use crate::{
     config::Config,
     module::Module,
-    nn::{Dropout, DropoutConfig, Linear, LinearConfig, GELU},
+    nn::{Dropout, DropoutConfig, Gelu, Linear, LinearConfig},
     tensor::{backend::Backend, Tensor},
 };
 
@@ -36,7 +36,7 @@ pub struct PositionWiseFeedForward<B: Backend> {
     linear_inner: Linear<B>,
     linear_outer: Linear<B>,
     dropout: Dropout,
-    gelu: GELU,
+    gelu: Gelu,
 }
 
 impl PositionWiseFeedForwardConfig {
@@ -50,7 +50,7 @@ impl PositionWiseFeedForwardConfig {
                 .with_initializer(self.initializer.clone())
                 .init(device),
             dropout: DropoutConfig::new(self.dropout).init(),
-            gelu: GELU::new(),
+            gelu: Gelu::new(),
         }
     }
     /// Initialize a new [position-wise feed-forward](PositionWiseFeedForward) module with a
@@ -63,7 +63,7 @@ impl PositionWiseFeedForwardConfig {
             linear_inner: LinearConfig::new(self.d_model, self.d_ff).init_with(record.linear_inner),
             linear_outer: LinearConfig::new(self.d_ff, self.d_model).init_with(record.linear_outer),
             dropout: DropoutConfig::new(self.dropout).init(),
-            gelu: GELU::new(),
+            gelu: Gelu::new(),
         }
     }
 }
burn-no-std-tests/src/conv.rs (4 changes: 2 additions & 2 deletions)

@@ -11,7 +11,7 @@ use burn::{
 pub struct ConvBlock<B: Backend> {
     conv: nn::conv::Conv2d<B>,
     pool: nn::pool::MaxPool2d,
-    activation: nn::GELU,
+    activation: nn::Gelu,
 }
 
 #[derive(Config)]
@@ -29,7 +29,7 @@ impl<B: Backend> ConvBlock<B> {
         let pool = nn::pool::MaxPool2dConfig::new(config.kernel_size)
             .with_padding(nn::PaddingConfig2d::Same)
             .init();
-        let activation = nn::GELU::new();
+        let activation = nn::Gelu::new();
 
         Self {
             conv,
examples/mnist-inference-web/README.md (8 changes: 4 additions & 4 deletions)

@@ -44,10 +44,10 @@ values.
 Layers:
 
 1. Input Image (28,28, 1ch)
-2. `Conv2d`(3x3, 8ch), `BatchNorm2d`, `GELU`
-3. `Conv2d`(3x3, 16ch), `BatchNorm2d`, `GELU`
-4. `Conv2d`(3x3, 24ch), `BatchNorm2d`, `GELU`
-5. `Linear`(11616, 32), `GELU`
+2. `Conv2d`(3x3, 8ch), `BatchNorm2d`, `Gelu`
+3. `Conv2d`(3x3, 16ch), `BatchNorm2d`, `Gelu`
+4. `Conv2d`(3x3, 24ch), `BatchNorm2d`, `Gelu`
+5. `Linear`(11616, 32), `Gelu`
 6. `Linear`(32, 10)
 7. Softmax Output
 
examples/mnist-inference-web/src/model.rs (8 changes: 4 additions & 4 deletions)

@@ -16,7 +16,7 @@ pub struct Model<B: Backend> {
     dropout: nn::Dropout,
     fc1: nn::Linear<B>,
     fc2: nn::Linear<B>,
-    activation: nn::GELU,
+    activation: nn::Gelu,
 }
 
 const NUM_CLASSES: usize = 10;
@@ -43,7 +43,7 @@ impl<B: Backend> Model<B> {
             fc1,
             fc2,
             dropout,
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
         }
     }
 
@@ -70,7 +70,7 @@ impl<B: Backend> Model<B> {
 pub struct ConvBlock<B: Backend> {
     conv: nn::conv::Conv2d<B>,
     norm: BatchNorm<B, 2>,
-    activation: nn::GELU,
+    activation: nn::Gelu,
 }
 
 impl<B: Backend> ConvBlock<B> {
@@ -83,7 +83,7 @@ impl<B: Backend> ConvBlock<B> {
         Self {
             conv,
             norm,
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
         }
     }
 
examples/mnist/src/model.rs (8 changes: 4 additions & 4 deletions)

@@ -17,7 +17,7 @@ pub struct Model<B: Backend> {
     dropout: nn::Dropout,
     fc1: nn::Linear<B>,
     fc2: nn::Linear<B>,
-    activation: nn::GELU,
+    activation: nn::Gelu,
 }
 
 impl<B: Backend> Default for Model<B> {
@@ -51,7 +51,7 @@ impl<B: Backend> Model<B> {
             dropout,
             fc1,
             fc2,
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
         }
     }
 
@@ -92,7 +92,7 @@ impl<B: Backend> Model<B> {
 pub struct ConvBlock<B: Backend> {
     conv: nn::conv::Conv2d<B>,
     norm: BatchNorm<B, 2>,
-    activation: nn::GELU,
+    activation: nn::Gelu,
 }
 
 impl<B: Backend> ConvBlock<B> {
@@ -105,7 +105,7 @@ impl<B: Backend> ConvBlock<B> {
         Self {
             conv,
             norm,
-            activation: nn::GELU::new(),
+            activation: nn::Gelu::new(),
         }
     }
 
