Skip to content

Commit

Permalink
[js/webgpu] Add activation Tanh (#21540)
Browse files Browse the repository at this point in the history
Bug: #21467

### Description
Adds `Tanh` as a fusable activation in the WebGPU backend: `getActivationSnippet` (js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts) now emits a WGSL snippet computing tanh via `e2x = exp(-2*|value|)` and `sign(value) * (1 - e2x) / (1 + e2x)`, and a corresponding `FusedConv` + Tanh test case is added to js/web/test/data/ops/fused-conv.jsonc.

### Motivation and Context
Fixes #21467: `FusedConv` with the `Tanh` activation was not supported by the WebGPU execution provider's activation fusion.
  • Loading branch information
axinging authored Jul 29, 2024
1 parent 79537d0 commit 0d7cf30
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 0 deletions.
4 changes: 4 additions & 0 deletions js/web/lib/wasm/jsep/webgpu/ops/fuse-utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ export const getActivationSnippet =
baseType}(uniforms.beta)));`;
case 'LeakyRelu':
return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;
case 'Tanh':
return `let e2x = exp(-2.0 * abs(value));
value = sign(value) * (1.0 - e2x) / (1.0 + e2x);
`;
case '':
return '';
// TODO: adding other activations that can be fused.
Expand Down
33 changes: 33 additions & 0 deletions js/web/test/data/ops/fused-conv.jsonc
Original file line number Diff line number Diff line change
Expand Up @@ -430,5 +430,38 @@
]
}
]
},
{
"name": "fused conv with tanh",
"operator": "FusedConv",
"attributes": [
{ "name": "activation", "data": "Tanh", "type": "string" },
{ "name": "kernel_shape", "data": [2, 2], "type": "ints" }
],
"opset": { "domain": "com.microsoft", "version": 1 },
"cases": [
{
"name": "T[0]",
"inputs": [
{
"data": [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
"dims": [1, 1, 3, 3],
"type": "float32"
},
{
"data": [0.11, 0.12, 0.13, 0.14],
"dims": [1, 1, 2, 2],
"type": "float32"
}
],
"outputs": [
{
"data": [0.15572261810302734, 0.20409323275089264, 0.29770541191101074, 0.3425688147544861],
"dims": [1, 1, 2, 2],
"type": "float32"
}
]
}
]
}
]

0 comments on commit 0d7cf30

Please sign in to comment.