Skip to content

Commit

Permalink
updating actionflow
Browse files Browse the repository at this point in the history
  • Loading branch information
JayKumarr committed Mar 13, 2024
1 parent 2ed0b58 commit f97ce56
Show file tree
Hide file tree
Showing 3 changed files with 34 additions and 31 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,4 +39,4 @@ jobs:
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Test with pytest
run: |
pytest tests/testing_sdtw.py
pytest softdtwkeras/tests/testing_sdtw.py
Empty file added softdtwkeras/tests/__init__.py
Empty file.
63 changes: 33 additions & 30 deletions tests/testing_sdtw.py → softdtwkeras/tests/testing_sdtw.py
Original file line number Diff line number Diff line change
@@ -1,30 +1,33 @@
import numpy as np
from tslearn.metrics import SquaredEuclidean, SoftDTW
import tensorflow as tf
from softdtwkeras.SDTWLoss import SDTWLoss

if __name__ == '__main__':
    # Two toy 2-D sequences of length 6 with a slight temporal misalignment,
    # used to compare the tslearn reference soft-DTW against SDTWLoss.
    seq_a = np.array([[0, 0], [0.9, 0], [0, 0], [0.5, 1], [0, 0], [0, 0]])
    seq_b = np.array([[0, 0], [0, 0], [0.1, 0], [0.9, 1], [0, 0], [0.5, 0]])

    seq_a_tensor = tf.convert_to_tensor(seq_a, dtype=tf.float64)
    seq_b_tensor = tf.convert_to_tensor(seq_b, dtype=tf.float64)

    # Reference computation via tslearn: pairwise squared-Euclidean cost
    # matrix, then soft-DTW with a small gamma (approaches DTW as gamma -> 0).
    cost_matrix = SquaredEuclidean(seq_a, seq_b)
    reference_sdtw = SoftDTW(cost_matrix, gamma=0.001)
    value = reference_sdtw.compute()

    # Gradient w.r.t. the cost matrix, shape [m, n] — this is also the
    # expected (soft) alignment matrix.
    alignment = reference_sdtw.grad()
    # Gradient w.r.t. the first sequence, shape [m, d].
    G = cost_matrix.jacobian_product(alignment)

    # Implementation under comparison: the Keras/TensorFlow soft-DTW loss.
    keras_loss = SDTWLoss(gamma=0.001)
    val_tensor = keras_loss.unit_loss(seq_a_tensor, seq_b_tensor)

    print(value)
    print(val_tensor)
import numpy as np
from tslearn.metrics import SquaredEuclidean, SoftDTW
import tensorflow as tf
from softdtwkeras.SDTWLoss import SDTWLoss

def test_soft_dtw():
    """Check SDTWLoss.unit_loss against tslearn's reference soft-DTW value.

    Builds two toy 2-D sequences, computes the soft-DTW discrepancy with
    tslearn (the reference), then with softdtwkeras, and asserts the two
    agree to within 1e-5.
    """
    x_ = np.array([[0, 0], [0.9, 0], [0, 0], [0.5, 1], [0, 0], [0, 0]])
    y_ = np.array([[0, 0], [0, 0], [0.1, 0], [0.9, 1], [0, 0], [0.5, 0]])

    x_tf = tf.convert_to_tensor(x_, dtype=tf.float64)
    y_tf = tf.convert_to_tensor(y_, dtype=tf.float64)

    # Reference: tslearn soft-DTW over the squared-Euclidean cost matrix.
    # With gamma -> 0 this approaches classic DTW.
    D = SquaredEuclidean(x_, y_)
    sdtw = SoftDTW(D, gamma=0.001)
    value = sdtw.compute()

    # Implementation under test.
    tff_sdtw__ = SDTWLoss(gamma=0.001)
    val_tensor = tff_sdtw__.unit_loss(x_tf, y_tf)
    print(value)
    print(val_tensor)

    # Tolerance-based comparison. The previous round-then-compare-exactly
    # approach (np.round(value, 5) == tf.round(val_tensor * 1e5) / 1e5) is
    # brittle: two values that differ by well under 1e-5 but straddle a
    # rounding boundary would compare unequal.
    assert np.isclose(float(value), float(val_tensor), atol=1e-5)

# Allow running this test file directly as a script (outside of pytest).
if __name__ == '__main__':
    test_soft_dtw()




0 comments on commit f97ce56

Please sign in to comment.