Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

activation intrinsics for neural networks #860

Open
wants to merge 44 commits into
base: master
Choose a base branch
from
Open
Changes from 1 commit
Commits
Show all changes
44 commits
Select commit Hold shift + click to select a range
2ff7029
start working on activations module
jalvesz Aug 13, 2024
7d1c6ad
softmax for ranks from 1 to 4
jalvesz Aug 15, 2024
c1303e7
move activations to specialfunctions, add specs
jalvesz Aug 17, 2024
f22756a
fix float constant definition
jalvesz Aug 17, 2024
b1a4180
fix float constant definition
jalvesz Aug 17, 2024
90b8de3
fix float constant definition
jalvesz Aug 17, 2024
b7c8c81
Merge branch 'fortran-lang:master' into activations
jalvesz Aug 19, 2024
1b3bf4f
update src CMakeLists
jalvesz Aug 19, 2024
f4ad250
add tests for activations
jalvesz Aug 19, 2024
9d7eb7c
add tests for sigmoid and gelu
jalvesz Aug 20, 2024
5727921
missing module procedure
jalvesz Aug 20, 2024
2ed7626
missing interface and change of kind definition for elemental module …
jalvesz Aug 20, 2024
f1acf1e
add SiLU activation
jalvesz Aug 21, 2024
230bea9
Merge branch 'fortran-lang:master' into activations
jalvesz Aug 21, 2024
dd7125d
Merge branch 'fortran-lang:master' into activations
jalvesz Sep 15, 2024
b137b36
Merge branch 'fortran-lang:master' into activations
jalvesz Sep 18, 2024
bc2bf5a
Merge branch 'fortran-lang:master' into activations
jalvesz Sep 24, 2024
5c47bf0
add any rank support for softmax and logsoftmax
jalvesz Sep 29, 2024
8f0cd69
Merge branch 'fortran-lang:master' into activations
jalvesz Oct 26, 2024
1a2245a
Merge branch 'activations' of https://github.com/jalvesz/stdlib into …
jalvesz Oct 26, 2024
5d0419e
homogenize arguments
jalvesz Oct 30, 2024
21851d0
add selu activation
jalvesz Dec 21, 2024
ef6e3e6
Merge branch 'activations' of https://github.com/jalvesz/stdlib into …
jalvesz Dec 22, 2024
1914e78
Add SELU documentation
jalvesz Dec 22, 2024
4c1afde
add tests
jalvesz Dec 23, 2024
bccbdd4
examples
jalvesz Dec 23, 2024
9b4ed49
fix relu example
jalvesz Dec 23, 2024
564c99c
fix tests
jalvesz Dec 23, 2024
9e9b28b
improve specs
jalvesz Dec 23, 2024
14af3f9
examples bugfix
jalvesz Dec 23, 2024
3789518
replace ifs with merge
jalvesz Dec 24, 2024
b36b143
Merge branch 'fortran-lang:master' into activations
jalvesz Dec 24, 2024
eedfad7
Merge branch 'fortran-lang:master' into activations
jalvesz Dec 26, 2024
2cba1ee
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 2, 2025
8dc0654
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 3, 2025
9e0f026
Merge branch 'activations' of https://github.com/jalvesz/stdlib into …
jalvesz Jan 3, 2025
cdde132
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 5, 2025
4363271
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 17, 2025
f06ab3b
Merge branch 'activations' of https://github.com/jalvesz/stdlib into …
jalvesz Jan 18, 2025
e483325
add leaky relu activation
jalvesz Jan 18, 2025
20ecd43
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 29, 2025
d5cfa36
Merge branch 'fortran-lang:master' into activations
jalvesz Jan 30, 2025
1c3fbda
Merge branch 'fortran-lang:master' into activations
jalvesz Feb 10, 2025
259360f
Merge branch 'fortran-lang:master' into activations
jalvesz Feb 28, 2025
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
missing module procedure
  • Loading branch information
jalvesz committed Aug 20, 2024
commit 5727921db172e30ec2f183a19573de5379f77fa6
16 changes: 8 additions & 8 deletions src/stdlib_specialfunctions_activations.fypp
Original file line number Diff line number Diff line change
@@ -173,15 +173,15 @@ end function
! Softmax
!==================================================
#:for rk, rt in REAL_KINDS_TYPES
pure function Softmax_r1_${rk}$( x ) result( y )
pure module function Softmax_r1_${rk}$( x ) result( y )
${rt}$, intent(in) :: x(:)
${rt}$ :: y(size(x))

y = exp(x - maxval(x))
y = y / sum(y)
end function

pure function Softmax_r2_${rk}$( x , dim ) result( y )
pure module function Softmax_r2_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2))

@@ -201,7 +201,7 @@ pure function Softmax_r2_${rk}$( x , dim ) result( y )
end if
end function

pure function Softmax_r3_${rk}$( x , dim ) result( y )
pure module function Softmax_r3_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2),size(x,dim=3))

@@ -221,7 +221,7 @@ pure function Softmax_r3_${rk}$( x , dim ) result( y )
end if
end function

pure function Softmax_r4_${rk}$( x , dim ) result( y )
pure module function Softmax_r4_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:,:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2),size(x,dim=3),size(x,dim=4))

@@ -241,15 +241,15 @@ pure function Softmax_r4_${rk}$( x , dim ) result( y )
end if
end function

pure function Softmax_grad_r1_${rk}$( x ) result( y )
pure module function Softmax_grad_r1_${rk}$( x ) result( y )
${rt}$, intent(in) :: x(:)
${rt}$ :: y(size(x))

y = Softmax(x)
y = y * (1._${rk}$ - y)
end function

pure function Softmax_grad_r2_${rk}$( x , dim ) result( y )
pure module function Softmax_grad_r2_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2))

@@ -262,7 +262,7 @@ pure function Softmax_grad_r2_${rk}$( x , dim ) result( y )
y = y * (1._${rk}$ - y)
end function

pure function Softmax_grad_r3_${rk}$( x , dim ) result( y )
pure module function Softmax_grad_r3_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2),size(x,dim=3))

@@ -275,7 +275,7 @@ pure function Softmax_grad_r3_${rk}$( x , dim ) result( y )
y = y * (1._${rk}$ - y)
end function

pure function Softmax_grad_r4_${rk}$( x , dim ) result( y )
pure module function Softmax_grad_r4_${rk}$( x , dim ) result( y )
${rt}$, intent(in) :: x(:,:,:,:)
${rt}$ :: y(size(x,dim=1),size(x,dim=2),size(x,dim=3),size(x,dim=4))

Loading