Skip to content

Commit

Permalink
Nick/tenmat constructors (#293)
Browse files Browse the repository at this point in the history
* Add copy constructor for tenmat.

* SPTENMAT: Remove from_data. Merge with __init__

* SPTENMAT: Move from_tensor_type to to_sptenmat
* Add isequal

* Pull out some common code ahead of tenmat constructor refactor.

* TENMAT: Decouple from_data and from_tensor
* From_data should only accept vector or matrix

* TENMAT: Merge from_data into default constructor

* TENMAT: Move from_tensor to to_tenmat
* Add isequal utility method to tenmat

* Update tutorials to match new constructors.

* Improve some documentation for updated constructors/methods.

* BUILD: Temporarily pin ruff to unblock
  • Loading branch information
ntjohnson1 authored Apr 3, 2024
1 parent 42f1289 commit ebd5069
Show file tree
Hide file tree
Showing 12 changed files with 829 additions and 575 deletions.
40 changes: 12 additions & 28 deletions docs/source/tutorial/class_sptenmat.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(X, np.array([0])) # Mode-0 matricization\n",
"A = X.to_sptenmat(np.array([0])) # Mode-0 matricization\n",
"A"
]
},
Expand All @@ -81,7 +81,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(X, np.array([1, 2])) # Multiple modes mapped to rows.\n",
"A = X.to_sptenmat(np.array([1, 2])) # Multiple modes mapped to rows.\n",
"A"
]
},
Expand All @@ -92,9 +92,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, cdims=np.array([1, 2])\n",
") # Specify column dimensions.\n",
"A = X.to_sptenmat(cdims=np.array([1, 2])) # Specify column dimensions.\n",
"A"
]
},
Expand All @@ -105,9 +103,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, np.arange(4)\n",
") # All modes mapped to rows, i.e., vectorize.\n",
"A = X.to_sptenmat(np.arange(4)) # All modes mapped to rows, i.e., vectorize.\n",
"A"
]
},
Expand All @@ -118,9 +114,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, np.array([1])\n",
") # By default, columns are ordered as [0, 2, 3]\n",
"A = X.to_sptenmat(np.array([1])) # By default, columns are ordered as [0, 2, 3]\n",
"A"
]
},
Expand All @@ -131,9 +125,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, np.array([1]), np.array([3, 0, 2])\n",
") # Specify explicit ordering\n",
"A = X.to_sptenmat(np.array([1]), np.array([3, 0, 2])) # Specify explicit ordering\n",
"A"
]
},
Expand All @@ -144,9 +136,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"fc\"\n",
") # Forward cyclic column ordering\n",
"A = X.to_sptenmat(np.array([1]), cdims_cyclic=\"fc\") # Forward cyclic column ordering\n",
"A"
]
},
Expand All @@ -157,9 +147,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic column ordering\n",
"A = X.to_sptenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic column ordering\n",
"A"
]
},
Expand Down Expand Up @@ -236,9 +224,7 @@
"metadata": {},
"outputs": [],
"source": [
"B = ttb.sptenmat.from_data(\n",
" A.subs, A.vals, A.rdims, A.cdims, A.tshape\n",
") # Effectively copies A\n",
"B = ttb.sptenmat(A.subs, A.vals, A.rdims, A.cdims, A.tshape) # Effectively copies A\n",
"B"
]
},
Expand All @@ -257,9 +243,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.sptenmat.from_data(\n",
" rdims=A.rdims, cdims=A.cdims, tshape=A.tshape\n",
") # An empty sptenmat\n",
"A = ttb.sptenmat(rdims=A.rdims, cdims=A.cdims, tshape=A.tshape) # An empty sptenmat\n",
"A"
]
},
Expand Down Expand Up @@ -298,7 +282,7 @@
"outputs": [],
"source": [
"X = ttb.sptenrand((10, 10, 10, 10), nonzeros=10) # Create sptensor\n",
"A = ttb.sptenmat.from_tensor_type(X, np.array([0])) # Convert to an sptenmat\n",
"A = X.to_sptenmat(np.array([0])) # Convert to an sptenmat\n",
"A"
]
},
Expand Down Expand Up @@ -328,7 +312,7 @@
"metadata": {},
"outputs": [],
"source": [
"B = ttb.sptenmat.from_tensor_type(ttb.sptenrand((3, 3, 3), nonzeros=3), np.array([0]))\n",
"B = ttb.sptenrand((3, 3, 3), nonzeros=3).to_sptenmat(np.array([0]))\n",
"B"
]
},
Expand Down
70 changes: 20 additions & 50 deletions docs/source/tutorial/class_tenmat.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@
"outputs": [],
"source": [
"# Dims [0,1] map to rows, [2,3] to columns.\n",
"A = ttb.tenmat.from_tensor_type(X, np.array([0, 1]), np.array([2, 3]))\n",
"A = X.to_tenmat(np.array([0, 1]), np.array([2, 3]))\n",
"A"
]
},
Expand All @@ -63,7 +63,7 @@
"metadata": {},
"outputs": [],
"source": [
"B = ttb.tenmat.from_tensor_type(X, np.array([1, 0]), np.array([2, 3])) # Order matters!\n",
"B = X.to_tenmat(np.array([1, 0]), np.array([2, 3])) # Order matters!\n",
"B"
]
},
Expand All @@ -73,7 +73,7 @@
"metadata": {},
"outputs": [],
"source": [
"C = ttb.tenmat.from_tensor_type(X, np.array([0, 1]), np.array([3, 2]))\n",
"C = X.to_tenmat(np.array([0, 1]), np.array([3, 2]))\n",
"C"
]
},
Expand All @@ -92,9 +92,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1])\n",
") # np.array([1]) passed to the `rdims` parameter\n",
"A = X.to_tenmat(np.array([1])) # np.array([1]) passed to the `rdims` parameter\n",
"A"
]
},
Expand All @@ -114,7 +112,7 @@
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"# Same as A = ttb.tenmat.from_tensor_type(X, np.array([0,3]), np.array([1,2]))\n",
"A = ttb.tenmat.from_tensor_type(X, cdims=np.array([1, 2]))\n",
"A = X.to_tenmat(cdims=np.array([1, 2]))\n",
"A"
]
},
Expand All @@ -132,9 +130,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, cdims=np.arange(0, 4)\n",
") # Map all the dimensions to the columns\n",
"A = X.to_tenmat(cdims=np.arange(0, 4)) # Map all the dimensions to the columns\n",
"A"
]
},
Expand All @@ -153,9 +149,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([2])\n",
") # By default, columns are ordered as [0, 1, 3].\n",
"A = X.to_tenmat(np.array([2])) # By default, columns are ordered as [0, 1, 3].\n",
"A"
]
},
Expand All @@ -166,9 +160,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), np.array([2, 0, 3])\n",
") # Explicit specification.\n",
"A = X.to_tenmat(np.array([1]), np.array([2, 0, 3])) # Explicit specification.\n",
"A"
]
},
Expand All @@ -178,9 +170,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"fc\"\n",
") # Forward cyclic, [2,3,0].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"fc\") # Forward cyclic, [2,3,0].\n",
"A"
]
},
Expand All @@ -190,9 +180,7 @@
"metadata": {},
"outputs": [],
"source": [
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A"
]
},
Expand All @@ -210,9 +198,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A.data # The 2D numpy array itself."
]
},
Expand Down Expand Up @@ -257,10 +243,8 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"B = ttb.tenmat.from_data(A.data, A.rindices, A.cindices, A.tshape)\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"B = ttb.tenmat(A.data, A.rindices, A.cindices, A.tshape)\n",
"B # Recreates A."
]
},
Expand Down Expand Up @@ -295,9 +279,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A.double() # Converts A to a standard 2D numpy array."
]
},
Expand All @@ -315,9 +297,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"Y = A.to_tensor()\n",
"Y"
]
Expand All @@ -336,9 +316,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A.shape # 2D numpy array shape."
]
},
Expand All @@ -365,9 +343,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A[1, 0] # Returns the (1,0) element of the 2D numpy array"
]
},
Expand All @@ -385,9 +361,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A[0:2, 0:2] = np.ones((2, 2))\n",
"A"
]
Expand Down Expand Up @@ -422,9 +396,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"A.norm() # Norm of the 2D numpy array."
]
},
Expand Down Expand Up @@ -488,9 +460,7 @@
"outputs": [],
"source": [
"X = ttb.tensor(np.arange(1, 25), shape=(3, 2, 2, 2)) # Create a tensor.\n",
"A = ttb.tenmat.from_tensor_type(\n",
" X, np.array([1]), cdims_cyclic=\"bc\"\n",
") # Backward cyclic, [0,3,2].\n",
"A = X.to_tenmat(np.array([1]), cdims_cyclic=\"bc\") # Backward cyclic, [0,3,2].\n",
"B = A * A.ctranspose() # Tenmat that is the product of two tenmats.\n",
"B"
]
Expand Down
2 changes: 1 addition & 1 deletion pyttb/hosvd.py
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ def hosvd( # noqa: PLR0912,PLR0913,PLR0915

for k in dimorder:
# Compute Gram matrix
Yk = ttb.tenmat.from_tensor_type(Y, np.array([k])).double()
Yk = Y.to_tenmat(np.array([k])).double()
Z = np.dot(Yk, Yk.transpose())

# Compute eigenvalue decomposition
Expand Down
44 changes: 43 additions & 1 deletion pyttb/pyttb_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

from enum import Enum
from inspect import signature
from typing import List, Optional, Tuple, Union, get_args, overload
from typing import List, Literal, Optional, Tuple, Union, get_args, overload

import numpy as np

Expand Down Expand Up @@ -879,3 +879,45 @@ def get_mttkrp_factors(
assert len(U) == ndims, "List of factor matrices is the wrong length"

return U


def gather_wrap_dims(
    ndims: int,
    rdims: Optional[np.ndarray] = None,
    cdims: Optional[np.ndarray] = None,
    cdims_cyclic: Optional[Union[Literal["fc"], Literal["bc"], Literal["t"]]] = None,
) -> Tuple[np.ndarray, np.ndarray]:
    """Determine the row and column modes for a tensor matricization.

    Given the number of tensor modes and at least one of ``rdims``/``cdims``,
    fill in the missing side with the remaining modes (in increasing order)
    and return both as integer arrays.

    Parameters
    ----------
    ndims:
        Number of dimensions of the tensor being matricized.
    rdims:
        Modes mapped to the rows of the matricization.
    cdims:
        Modes mapped to the columns of the matricization.
    cdims_cyclic:
        Only honored when ``rdims`` contains a single mode and ``cdims`` is
        ``None``:
        ``"fc"`` (forward cyclic) orders the column modes as
        ``[rdims[0]+1, ..., ndims-1, 0, ..., rdims[0]-1]``;
        ``"bc"`` (backward cyclic) orders them as
        ``[rdims[0]-1, ..., 0, ndims-1, ..., rdims[0]+1]``;
        ``"t"`` transposes the mapping, using ``rdims`` as the single
        column mode.

    Returns
    -------
    Tuple of (row modes, column modes) as int arrays.

    Raises
    ------
    ValueError
        If ``cdims_cyclic`` is provided but is not one of "fc", "bc", "t".
    """
    alldims = np.arange(ndims)

    if rdims is not None and cdims is None:
        # Single row mapping with an explicit column-ordering pattern
        if len(rdims) == 1 and cdims_cyclic is not None:
            # TODO we should be able to remove this since we can just specify
            # cdims alone
            if cdims_cyclic == "t":
                # Transpose: the lone row mode becomes the column mode.
                cdims = rdims
                rdims = np.setdiff1d(alldims, rdims)
            elif cdims_cyclic == "fc":
                # Forward cyclic: modes after rdims[0], then modes before it.
                cdims = np.concatenate(
                    (np.arange(rdims[0] + 1, ndims), np.arange(rdims[0]))
                )
            elif cdims_cyclic == "bc":
                # Backward cyclic: modes before rdims[0] reversed, then
                # modes after it reversed.
                cdims = np.concatenate(
                    (
                        np.arange(rdims[0] - 1, -1, -1),
                        np.arange(ndims - 1, rdims[0], -1),
                    )
                )
            else:
                # raise (not assert): validation must survive `python -O`.
                raise ValueError(
                    "Unrecognized value for cdims_cyclic pattern, "
                    'must be "fc", "bc", or "t".'
                )
        else:
            # Multiple row mapping: columns are whatever modes remain.
            cdims = np.setdiff1d(alldims, rdims)

    elif rdims is None and cdims is not None:
        # Only columns given: rows are the remaining modes.
        rdims = np.setdiff1d(alldims, cdims)

    # Both sides must be resolved by now (narrows Optional for type checkers).
    assert rdims is not None and cdims is not None
    return rdims.astype(int), cdims.astype(int)
Loading

0 comments on commit ebd5069

Please sign in to comment.