Skip to content

Commit

Permalink
Added native implementation for FusedLeakyReLU
Browse files Browse the repository at this point in the history
  • Loading branch information
rosinality committed Jul 9, 2020
1 parent 488d960 commit be05887
Showing 1 changed file with 14 additions and 4 deletions.
18 changes: 14 additions & 4 deletions op/fused_act.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,10 @@

# Directory containing this file; the C++/CUDA sources live alongside it.
module_path = os.path.dirname(__file__)

# JIT-compile and load the fused bias+activation CUDA extension.
# NOTE(review): this runs at import time and requires a working CUDA
# toolchain; import will fail on machines without nvcc.
fused = load(
    "fused",
    sources=[
        os.path.join(module_path, "fused_bias_act.cpp"),
        os.path.join(module_path, "fused_bias_act_kernel.cu"),
    ],
)

Expand Down Expand Up @@ -83,4 +83,14 @@ def forward(self, input):


def fused_leaky_relu(input, bias, negative_slope=0.2, scale=2 ** 0.5):
    """Apply leaky ReLU with a fused per-channel bias and output scaling.

    On CPU tensors this falls back to a pure-PyTorch implementation:
    ``leaky_relu(input + bias) * scale`` with the bias broadcast over
    dimension 1 (channels). On CUDA tensors it dispatches to the native
    fused kernel via ``FusedLeakyReLUFunction``.

    Args:
        input: Tensor of shape ``(N, C, ...)``; dim 1 must match ``bias``.
        bias: 1-D tensor of per-channel biases (length ``C``).
        negative_slope: Slope for negative inputs (default ``0.2``).
        scale: Multiplier applied after the activation (default ``sqrt(2)``).

    Returns:
        Tensor with the same shape as ``input``.
    """
    if input.device.type == "cpu":
        # Reshape bias to (1, C, 1, 1, ...) so it broadcasts over the
        # batch and any trailing spatial dimensions.
        rest_dim = [1] * (input.ndim - bias.ndim - 1)
        return (
            F.leaky_relu(
                input + bias.view(1, bias.shape[0], *rest_dim),
                # BUG FIX: was hard-coded to 0.2, silently ignoring the
                # caller-supplied negative_slope on the CPU path.
                negative_slope=negative_slope,
            )
            * scale
        )

    else:
        return FusedLeakyReLUFunction.apply(input, bias, negative_slope, scale)

0 comments on commit be05887

Please sign in to comment.