[Optimization] Use scipy's eigs instead of numpy in lap_pe (#5855)
Co-authored-by: rudongyu <ru_dongyu@outlook.com>
Co-authored-by: Hongzhi (Steve), Chen <chenhongzhi.nkcs@gmail.com>
3 people committed Aug 24, 2023
1 parent 95221f0 commit 019a1a6
Showing 1 changed file with 20 additions and 7 deletions.
python/dgl/transforms/functional.py
@@ -3680,13 +3680,26 @@ def lap_pe(g, k, padding=False, return_eigval=False):
     L = sparse.eye(g.num_nodes()) - N * A * N
 
     # select eigenvectors with smaller eigenvalues O(n + klogk)
-    EigVal, EigVec = np.linalg.eig(L.toarray())
-    max_freqs = min(n - 1, k)
-    kpartition_indices = np.argpartition(EigVal, max_freqs)[: max_freqs + 1]
-    topk_eigvals = EigVal[kpartition_indices]
-    topk_indices = kpartition_indices[topk_eigvals.argsort()][1:]
-    topk_EigVec = EigVec[:, topk_indices]
-    eigvals = F.tensor(EigVal[topk_indices], dtype=F.float32)
+    if k + 1 < n - 1:
+        # Use scipy if k + 1 < n - 1 for memory efficiency.
+        EigVal, EigVec = scipy.sparse.linalg.eigs(
+            L, k=k + 1, which="SR", tol=1e-2
+        )
+        topk_indices = EigVal.argsort()[1:]
+        # Since scipy may return complex value, to avoid crashing in NN code,
+        # convert them to real number.
+        topk_eigvals = EigVal[topk_indices].real
+        topk_EigVec = EigVec[:, topk_indices].real
+    else:
+        # Fallback to numpy since scipy.sparse do not support this case.
+        EigVal, EigVec = np.linalg.eig(L.toarray())
+        max_freqs = min(n - 1, k)
+        kpartition_indices = np.argpartition(EigVal, max_freqs)[: max_freqs + 1]
+        topk_eigvals = EigVal[kpartition_indices]
+        topk_indices = kpartition_indices[topk_eigvals.argsort()][1:]
+        topk_EigVec = EigVec[:, topk_indices]
+    topk_EigVal = EigVal[topk_indices]
+    eigvals = F.tensor(topk_EigVal, dtype=F.float32)
 
     # get random flip signs
     rand_sign = 2 * (np.random.rand(max_freqs) > 0.5) - 1.0
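For readers skimming the diff: np.linalg.eig(L.toarray()) densifies the n x n Laplacian and computes all n eigenpairs, whereas scipy.sparse.linalg.eigs(..., which="SR") works on the sparse matrix and extracts only the k + 1 eigenpairs with the smallest real part, which is why the new path is gated on k + 1 < n - 1 (the bound eigs itself places on its k argument). The snippet below is a minimal standalone sketch of that idea, not the DGL function: the helper name smallest_laplacian_eigvecs and the toy 6-node cycle graph are illustrative only.

# Minimal sketch (assumptions: helper name and toy graph are illustrative,
# not part of DGL): pick the k smallest-eigenvalue eigenvectors of a sparse
# normalized Laplacian, preferring scipy's sparse solver when it applies.
import numpy as np
import scipy.sparse
import scipy.sparse.linalg


def smallest_laplacian_eigvecs(L, k):
    """Eigenpairs for the k smallest eigenvalues of sparse L, skipping the
    trivial first eigenvector, mirroring the logic in the diff above."""
    n = L.shape[0]
    if k + 1 < n - 1:
        # Sparse solver: request the k + 1 eigenpairs with the smallest
        # real part ("SR") without densifying the n x n matrix.
        eigval, eigvec = scipy.sparse.linalg.eigs(
            L, k=k + 1, which="SR", tol=1e-2
        )
        order = eigval.argsort()[1:]  # drop the trivial smallest eigenpair
    else:
        # eigs requires its k to be < n - 1, so densify and use numpy here.
        eigval, eigvec = np.linalg.eig(L.toarray())
        order = eigval.argsort()[1 : k + 1]
    # eigs can return complex values with tiny imaginary parts; keep the
    # real part so downstream NN code gets float tensors.
    return eigval[order].real, eigvec[:, order].real


# Toy usage: normalized Laplacian of a 6-node cycle graph.
rows = list(range(6)) * 2
cols = [(i + 1) % 6 for i in range(6)] + [(i - 1) % 6 for i in range(6)]
adj = scipy.sparse.csr_matrix((np.ones(12), (rows, cols)), shape=(6, 6))
deg_inv_sqrt = scipy.sparse.diags(
    np.asarray(adj.sum(axis=1)).flatten().clip(1) ** -0.5
)
lap = scipy.sparse.eye(6) - deg_inv_sqrt @ adj @ deg_inv_sqrt
vals, vecs = smallest_laplacian_eigvecs(lap, k=2)
print(vals.shape, vecs.shape)  # (2,) (6, 2)

Under these assumptions the sparse path never materializes the dense Laplacian, which is the memory saving the commit title refers to; the dense numpy path remains only for graphs too small for eigs.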
