Refactored all the transformer tests and gave each of them a name.
1 parent 9002452, commit 4c82474
Showing 8 changed files with 109 additions and 67 deletions.
test/transformer_related/multi_head_attention_stiefel_retraction.jl: 51 changes (25 additions, 26 deletions)
@@ -1,45 +1,44 @@
-"""
-This is a test for that checks if the retractions (geodesic and Cayley for now) map from StiefelLieAlgHorMatrix to StiefelManifold when used with MultiHeadAttention.
-"""
-
 import Random, Test, Lux, LinearAlgebra, KernelAbstractions
 
 using GeometricMachineLearning, Test
 using GeometricMachineLearning: geodesic
 using GeometricMachineLearning: cayley
 using GeometricMachineLearning: init_optimizer_cache
 
-dim = 64
-n_heads = 8
-Dₕ = dim÷8
-tol = eps(Float32)
-T = Float32
-backend = KernelAbstractions.CPU()
-
-model = MultiHeadAttention(dim, n_heads, Stiefel=true)
-
-ps = initialparameters(backend, T, model)
-
-cache = init_optimizer_cache(MomentumOptimizer(), ps)
-
-E = StiefelProjection(dim, Dₕ, T)
-function check_retraction_geodesic(A::AbstractMatrix)
+@doc raw"""
+This function computes the geodesic retraction of an element of `StiefelLieAlgHorMatrix` and then checks if the resulting element is `StiefelProjection`.
+"""
+function check_retraction_geodesic(A::AbstractMatrix{T}, tol=eps(T)) where T
     A_retracted = geodesic(A)
     @test typeof(A_retracted) <: StiefelManifold
-    @test LinearAlgebra.norm(A_retracted - E) < tol
+    @test LinearAlgebra.norm(A_retracted - StiefelProjection(A_retracted)) < tol
 end
 check_retraction_geodesic(cache::NamedTuple) = apply_toNT(check_retraction_geodesic, cache)
 check_retraction_geodesic(B::MomentumCache) = check_retraction_geodesic(B.B)
 
-check_retraction_geodesic(cache)
-
-E = StiefelProjection(dim, Dₕ)
-function check_retraction_cayley(A::AbstractMatrix)
+@doc raw"""
+This function computes the cayley retraction of an element of `StiefelLieAlgHorMatrix` and then checks if the resulting element is `StiefelProjection`.
+"""
+function check_retraction_cayley(A::AbstractMatrix{T}, tol=eps(T)) where T
     A_retracted = cayley(A)
     @test typeof(A_retracted) <: StiefelManifold
-    @test LinearAlgebra.norm(A_retracted - E) < tol
+    @test LinearAlgebra.norm(A_retracted - StiefelProjection(A_retracted)) < tol
 end
 check_retraction_cayley(cache::NamedTuple) = apply_toNT(check_retraction_cayley, cache)
 check_retraction_cayley(B::MomentumCache) = check_retraction_cayley(B.B)
 
-check_retraction_cayley(cache)
+@doc raw"""
+This is a test for that checks if the retractions (geodesic and Cayley for now) map from `StiefelLieAlgHorMatrix` to `StiefelManifold` when used with `MultiHeadAttention`.
+"""
+function test_multi_head_attention_retraction(T::Type, dim, n_heads, tol=eps(T), backend=KernelAbstractions.CPU())
+    model = MultiHeadAttention(dim, n_heads, Stiefel=true)
+
+    ps = initialparameters(backend, T, model)
+    cache = init_optimizer_cache(MomentumOptimizer(), ps)
+
+    check_retraction_geodesic(cache)
+
+    check_retraction_cayley(cache)
+end
+
+test_multi_head_attention_retraction(Float32, 64, 8)
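
Since the refactoring turns the script into a named, parameterized test, it can now be re-run for other element types and sizes. The sketch below is illustrative only and is not part of this commit: the Float64 configuration, the loosened tolerance, and the include path (assumed to be relative to the repository root) are assumptions layered on the signature test_multi_head_attention_retraction(T, dim, n_heads, tol=eps(T), backend=KernelAbstractions.CPU()) introduced above.

using Test, KernelAbstractions

# Sketch only (not part of this commit): load the refactored test file from the
# repository root; this also runs the Float32 call at the bottom of the file.
include("test/transformer_related/multi_head_attention_stiefel_retraction.jl")

# Re-run the named test for a hypothetical Float64 configuration at a smaller width.
# The tolerance is loosened to eps(Float32) because eps(Float64) may be too strict
# for the norm-based checks inside check_retraction_geodesic / check_retraction_cayley.
test_multi_head_attention_retraction(Float64, 32, 4, eps(Float32))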