smart dependency manager #511

Open · dcolinmorgan wants to merge 170 commits into base: master from dev/dep_man

Changes from 19 commits (of 170 total)

Commits
9208e27
naive first pass, not working
dcolinmorgan Oct 5, 2023
1b1a727
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
eb4ac0c
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
ea08c7c
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
e0c7123
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
a41f762
lint
dcolinmorgan Oct 10, 2023
d54ee2e
umap smart dependecies
dcolinmorgan Oct 11, 2023
01abf59
update umap&feature tests
dcolinmorgan Oct 12, 2023
2e58fa5
update umap&feature tests
dcolinmorgan Oct 12, 2023
2960bda
update umap&feature tests
dcolinmorgan Oct 12, 2023
e2fac00
feature_utils build import_min_exn using deps
dcolinmorgan Oct 12, 2023
70d3e9b
feature_utils build import_min_exn using deps
dcolinmorgan Oct 12, 2023
4d8c6c8
add return types
dcolinmorgan Oct 12, 2023
3c2fdcf
add return types
dcolinmorgan Oct 12, 2023
f168a4f
working dgl, progress on embed
dcolinmorgan Oct 13, 2023
5144e3c
smart packages load, subfunctions not yet
dcolinmorgan Oct 13, 2023
f7a8e01
working embed and library function import
dcolinmorgan Oct 13, 2023
3e3d44c
working embed and library function import
dcolinmorgan Oct 13, 2023
e99cbe5
add functional import to feature/umap
dcolinmorgan Oct 14, 2023
c8523ba
review leo lint
dcolinmorgan Oct 16, 2023
c2b0397
loading just libraries
dcolinmorgan Oct 19, 2023
813fde2
lint
dcolinmorgan Oct 19, 2023
caecfba
lint
dcolinmorgan Oct 19, 2023
4af3fad
lint
dcolinmorgan Oct 19, 2023
22e4d18
lint
dcolinmorgan Oct 19, 2023
68537c6
lint
dcolinmorgan Oct 19, 2023
886d51a
add tests
dcolinmorgan Oct 19, 2023
a4ca316
add tests
dcolinmorgan Oct 19, 2023
f6fb4b9
if library then subfunction import
dcolinmorgan Oct 23, 2023
ed0262b
if library then subfunction import
dcolinmorgan Oct 23, 2023
0f9539d
naive first pass, not working
dcolinmorgan Oct 5, 2023
d34fef2
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
65eca98
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
629b648
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
ff7590b
working smart dep manager in feature_utils
dcolinmorgan Oct 10, 2023
4d7b824
lint
dcolinmorgan Oct 10, 2023
fc89beb
umap smart dependecies
dcolinmorgan Oct 11, 2023
6778a16
update umap&feature tests
dcolinmorgan Oct 12, 2023
df5fcae
update umap&feature tests
dcolinmorgan Oct 12, 2023
8c48dcf
update umap&feature tests
dcolinmorgan Oct 12, 2023
c1df5ba
feature_utils build import_min_exn using deps
dcolinmorgan Oct 12, 2023
0c86a7e
feature_utils build import_min_exn using deps
dcolinmorgan Oct 12, 2023
86f51b3
add return types
dcolinmorgan Oct 12, 2023
7230af2
add return types
dcolinmorgan Oct 12, 2023
45415e8
working dgl, progress on embed
dcolinmorgan Oct 13, 2023
9e28265
smart packages load, subfunctions not yet
dcolinmorgan Oct 13, 2023
5e9956b
working embed and library function import
dcolinmorgan Oct 13, 2023
f595dc5
working embed and library function import
dcolinmorgan Oct 13, 2023
5e25907
add functional import to feature/umap
dcolinmorgan Oct 14, 2023
f47b6d7
review leo lint
dcolinmorgan Oct 16, 2023
511187f
loading just libraries
dcolinmorgan Oct 19, 2023
e7ba215
lint
dcolinmorgan Oct 19, 2023
d784537
lint
dcolinmorgan Oct 19, 2023
8e6cd50
lint
dcolinmorgan Oct 19, 2023
fddde77
lint
dcolinmorgan Oct 19, 2023
9aed732
lint
dcolinmorgan Oct 19, 2023
2ee37fc
add tests
dcolinmorgan Oct 19, 2023
0011a73
add tests
dcolinmorgan Oct 19, 2023
e08c16f
if library then subfunction import
dcolinmorgan Oct 23, 2023
e6f29dd
if library then subfunction import
dcolinmorgan Oct 23, 2023
9f95b7e
Merge branch 'dev/dep_man' of https://github.com/graphistry/pygraphis…
dcolinmorgan Nov 23, 2023
1304968
lint
dcolinmorgan Nov 23, 2023
4dd7d0a
lint
dcolinmorgan Nov 23, 2023
a12898b
lint
dcolinmorgan Nov 23, 2023
a1db061
tqdm bugs ??
dcolinmorgan Nov 23, 2023
9199db0
tqdm bugs ??
dcolinmorgan Nov 24, 2023
f3c12e9
tqdm bugs ??
dcolinmorgan Nov 24, 2023
95be2db
tqdm bugs ??
dcolinmorgan Nov 24, 2023
74092fc
tqdm bugs ??
dcolinmorgan Nov 24, 2023
3210019
test_text_utils deps check
dcolinmorgan Nov 24, 2023
abb999e
test_text_utils deps check
dcolinmorgan Nov 24, 2023
5192f79
typos
dcolinmorgan Nov 24, 2023
0d165dd
ignore type
dcolinmorgan Nov 24, 2023
032193a
lint
dcolinmorgan Nov 24, 2023
75207ce
lint
dcolinmorgan Nov 24, 2023
1f539f1
lint
dcolinmorgan Nov 24, 2023
219555b
lint
dcolinmorgan Nov 24, 2023
8b53e6d
lint
dcolinmorgan Nov 24, 2023
3380fa5
lint
dcolinmorgan Nov 24, 2023
c12ed7e
push test logic
dcolinmorgan Nov 24, 2023
ecdd72b
push test logic
dcolinmorgan Nov 24, 2023
181abfa
push test logic
dcolinmorgan Nov 24, 2023
703e923
push test logic
dcolinmorgan Nov 24, 2023
5d7f750
lint
dcolinmorgan Nov 24, 2023
849baae
lint
dcolinmorgan Nov 24, 2023
6935a91
lint
dcolinmorgan Nov 24, 2023
c1f94c2
lint
dcolinmorgan Nov 24, 2023
eeaef0b
dep_flag lint
dcolinmorgan Nov 24, 2023
8d4c1df
assert logic
dcolinmorgan Nov 24, 2023
37ea918
lint
dcolinmorgan Nov 27, 2023
8e32e0c
lint
dcolinmorgan Nov 27, 2023
1f5f243
lint
dcolinmorgan Nov 27, 2023
20430e0
lint
dcolinmorgan Nov 27, 2023
a3bb113
remove conditional
dcolinmorgan Nov 27, 2023
9528e4a
sklearn assert
dcolinmorgan Nov 27, 2023
d170ace
sklearn assert
dcolinmorgan Nov 27, 2023
6a508c4
sklearn assert
dcolinmorgan Nov 27, 2023
f5812bd
sklearn assert
dcolinmorgan Nov 27, 2023
976d1dd
cumml _v_ test
dcolinmorgan Nov 27, 2023
2faf466
cumml _v_ test
dcolinmorgan Nov 27, 2023
2c96419
lint
dcolinmorgan Nov 27, 2023
ab73859
lint
dcolinmorgan Nov 27, 2023
a379787
lint
dcolinmorgan Nov 27, 2023
580ef32
lint
dcolinmorgan Nov 27, 2023
2c35bb2
lint
dcolinmorgan Nov 27, 2023
3d5aa45
lint
dcolinmorgan Nov 27, 2023
260c3b7
remove two too precise tests
dcolinmorgan Nov 27, 2023
23e4257
lint
dcolinmorgan Nov 27, 2023
c6417f9
lint
dcolinmorgan Nov 27, 2023
457ef7a
lint
dcolinmorgan Nov 27, 2023
69e59e7
add sklearn to core dep
dcolinmorgan Nov 27, 2023
6977d67
add sklearn to core dep
dcolinmorgan Nov 27, 2023
bba6c00
add sklearn to core dep
dcolinmorgan Nov 27, 2023
533a750
add sklearn+umap to core dep
dcolinmorgan Nov 27, 2023
20b1f16
add sklearn+umap to core dep
dcolinmorgan Nov 27, 2023
dd23f25
add sklearn+umap to core dep
dcolinmorgan Nov 27, 2023
3b59258
add scipy, dc to core dep
dcolinmorgan Nov 27, 2023
5e63074
add scipy, dc to core dep
dcolinmorgan Nov 27, 2023
6db86a3
revert to working
dcolinmorgan Nov 27, 2023
42f6a75
Merge branch 'dev/dep_man' of https://github.com/graphistry/pygraphis…
dcolinmorgan Nov 27, 2023
aadc84b
clsoe
dcolinmorgan Nov 27, 2023
edbdf37
remove has_
dcolinmorgan Nov 27, 2023
0ec47bb
np.all to allclose
dcolinmorgan Nov 27, 2023
139f7f9
lint
dcolinmorgan Nov 27, 2023
3223a27
revert allclose
dcolinmorgan Nov 27, 2023
c47df98
drop assert
dcolinmorgan Nov 27, 2023
26cd5e9
drop assert
dcolinmorgan Nov 27, 2023
e47fa35
drop assert
dcolinmorgan Nov 27, 2023
d8f9e6d
lint
dcolinmorgan Nov 27, 2023
1904df5
respond to most comments
dcolinmorgan Dec 4, 2023
a9d3d9e
respond to most comments
dcolinmorgan Dec 4, 2023
0dd4ed6
respond to most comments
dcolinmorgan Dec 4, 2023
6007eb7
respond to tqdm, <2 column comments
dcolinmorgan Dec 5, 2023
6d0cb1c
respond to tqdm, <2 column comments
dcolinmorgan Dec 5, 2023
86378eb
respond to tqdm, <2 column comments
dcolinmorgan Dec 5, 2023
5b36dd0
respond to tqdm
dcolinmorgan Dec 5, 2023
90ca97a
Merge branch 'master' into dev/dep_man
dcolinmorgan Dec 5, 2023
08de406
tqdm set_descr error
dcolinmorgan Dec 5, 2023
b236337
tqdm set_descr error
dcolinmorgan Dec 5, 2023
85e1e24
tqdm not trange has "set_description"
dcolinmorgan Dec 5, 2023
c86cb53
tqdm not trange has "set_description"
dcolinmorgan Dec 5, 2023
5d5146f
tqdm not trange has "set_description"
dcolinmorgan Dec 5, 2023
8640971
tqdm.tqdm
dcolinmorgan Dec 5, 2023
58d9810
tqdm.tqdm
dcolinmorgan Dec 5, 2023
d02d480
fallback to lazy import
dcolinmorgan Dec 5, 2023
a39928c
fallback to lazy import
dcolinmorgan Dec 5, 2023
cedd9ad
half lazy import
dcolinmorgan Dec 5, 2023
dcfdd9c
smart import
dcolinmorgan Dec 5, 2023
cc8c4d2
smart import
dcolinmorgan Dec 5, 2023
79045df
smart import
dcolinmorgan Dec 5, 2023
21bf0c9
lint
dcolinmorgan Dec 5, 2023
9801824
refactored 1 column exception workaround
dcolinmorgan Dec 5, 2023
7b86a04
refactored 1 column exception workaround
dcolinmorgan Dec 5, 2023
0eea678
Merge branch 'master' into dev/dep_man
dcolinmorgan May 23, 2024
7441b29
no explicit lazy
dcolinmorgan May 23, 2024
52abe0f
lint
dcolinmorgan May 23, 2024
f87139d
lint
dcolinmorgan May 23, 2024
637a991
lint
dcolinmorgan May 23, 2024
21d2748
lint
dcolinmorgan May 23, 2024
f0db78b
remove defunct lazies
dcolinmorgan May 23, 2024
9189800
lint
dcolinmorgan May 23, 2024
0de2ffa
lint
dcolinmorgan May 23, 2024
071faf1
lint
dcolinmorgan May 23, 2024
62c58bc
lint
dcolinmorgan May 23, 2024
ab49794
lint
dcolinmorgan May 23, 2024
8cb2838
lint
dcolinmorgan May 23, 2024
bb8a258
lint sheesh
dcolinmorgan May 23, 2024
0309329
test gpu-avail
dcolinmorgan May 24, 2024
f37ce87
lint
dcolinmorgan May 27, 2024
83f8fc5
lint2
dcolinmorgan May 27, 2024
36 changes: 36 additions & 0 deletions graphistry/dep_manager.py
@@ -0,0 +1,36 @@
import importlib


class DepManager:
    def __init__(self):
        self.pkgs = {}

    def __getattr__(self, pkg: str):
        # Attribute access doubles as an import request and always returns a
        # 4-tuple: (ok, message, module, version). Underscores stand for dots:
        # deps.torch -> import torch; deps.torch_nn -> from torch import nn.
        if '_' not in pkg:
            self._add_deps(pkg)
            try:
                return True, "ok", self.pkgs[pkg], self.pkgs[pkg].__version__
            except KeyError:
                return False, str(pkg) + " not installed", None, None
        else:
            module = '.'.join(pkg.split('_')[:-1])
            name = pkg.split('_')[-1]
            self.import_from(module, name)
            try:
                # import_from stores under `name`; submodules may lack __version__
                return True, "ok", self.pkgs[name], getattr(self.pkgs[name], '__version__', None)
            except KeyError:
                return False, str([module, name]) + " not installed", None, None

    def _add_deps(self, pkg: str):
        try:
            self.pkgs[pkg] = importlib.import_module(pkg)
        except Exception:
            # Leave self.pkgs untouched so the caller receives the False tuple;
            # caching None as an instance attribute would bypass __getattr__
            # on later accesses and break the 4-tuple contract.
            pass

    def import_from(self, pkg: str, name: str):
        try:
            module = __import__(pkg, fromlist=[name])
            self.pkgs[name] = getattr(module, name)
        except Exception:
            pass
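
For reviewers, a minimal usage sketch of the contract above (illustrative only, not part of the diff; assumes numpy and scipy are installed in the environment): attribute access triggers the import, underscores in the attribute name stand for dots in the module path, and the result is always an (ok, message, module, version) 4-tuple.

from graphistry.dep_manager import DepManager

deps = DepManager()

# plain name: deps.numpy behaves like `import numpy`
ok, msg, np, version = deps.numpy
if ok:
    print("numpy", version)  # version comes from numpy.__version__
else:
    print(msg)               # "numpy not installed"

# underscored name: deps.scipy_sparse behaves like `from scipy import sparse`
ok, msg, sparse, _ = deps.scipy_sparse
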
48 changes: 25 additions & 23 deletions graphistry/dgl_utils.py
@@ -17,7 +17,7 @@
)

from .util import setup_logger

from .dep_manager import DepManager

if TYPE_CHECKING:
import scipy
@@ -34,24 +34,24 @@
MIXIN_BASE = object


def lazy_dgl_import_has_dependency():
try:
import warnings
warnings.filterwarnings('ignore')
import dgl # noqa: F811
return True, 'ok', dgl
except ModuleNotFoundError as e:
return False, e, None
# def lazy_dgl_import_has_dependency():
# try:
# import warnings
# warnings.filterwarnings('ignore')
# import dgl # noqa: F811
# return True, 'ok', dgl
# except ModuleNotFoundError as e:
# return False, e, None


def lazy_torch_import_has_dependency():
try:
import warnings
warnings.filterwarnings('ignore')
import torch # noqa: F811
return True, 'ok', torch
except ModuleNotFoundError as e:
return False, e, None
# def lazy_torch_import_has_dependency():
# try:
# import warnings
# warnings.filterwarnings('ignore')
# import torch # noqa: F811
# return True, 'ok', torch
# except ModuleNotFoundError as e:
# return False, e, None


logger = setup_logger(name=__name__, verbose=config.VERBOSE)
@@ -73,7 +73,7 @@ def convert_to_torch(X_enc: pd.DataFrame, y_enc: Optional[pd.DataFrame]): # typ
:param y_enc: DataFrame Matrix of Values for Target
:return: Dictionary of torch encoded arrays
"""
_, _, torch = lazy_torch_import_has_dependency() # noqa: F811
_, _, torch, _ = deps.torch # noqa: F811

if not y_enc.empty: # type: ignore
data = {
@@ -98,7 +98,7 @@ def get_available_devices():
device (torch.device): Main device (GPU 0 or CPU).
gpu_ids (list): List of IDs of all GPUs that are available.
"""
_, _, torch = lazy_torch_import_has_dependency() # noqa: F811
_, _, torch, _ = deps.torch # noqa: F811

gpu_ids = []
if torch.cuda.is_available():
@@ -181,7 +181,9 @@ def pandas_to_dgl_graph(
sp_mat: sparse scipy matrix
ordered_nodes_dict: dict ordered from most common src and dst nodes
"""
_, _, dgl = lazy_dgl_import_has_dependency() # noqa: F811
deps = DepManager()
Contributor review comment: same, strange to re-init everywhere

_, _, dgl, _ = deps.dgl # noqa: F811

sp_mat, ordered_nodes_dict = pandas_to_sparse_adjacency(df, src, dst, weight_col)
g = dgl.from_scipy(sp_mat, device=device) # there are other ways too
logger.info(f"Graph Type: {type(g)}")
@@ -196,7 +198,7 @@ def get_torch_train_test_mask(n: int, ratio: float = 0.8):
:param ratio: mimics train/test split. `ratio` sets number of True vs False mask entries.
:return: train and test torch tensor masks
"""
_, _, torch = lazy_torch_import_has_dependency() # noqa: F811
_, _, torch, _ = deps.torch # noqa: F811

train_mask = torch.zeros(n, dtype=torch.bool).bernoulli(ratio)
test_mask = ~train_mask
@@ -225,8 +227,8 @@ def dgl_lazy_init(self, train_split: float = 0.8, device: str = "cpu"):
"""

if not self.dgl_initialized:
lazy_dgl_import_has_dependency()
lazy_torch_import_has_dependency()
deps.dgl
deps.torch
self.train_split = train_split
self.device = device
self._removed_edges_previously = False
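The contributor comment above flags the per-function DepManager() re-initialization; embed_utils.py in the next diff already keeps one module-level instance instead. A sketch of that shared-instance pattern, assuming torch is available (get_torch_or_raise is a hypothetical helper, not in the diff):

from graphistry.dep_manager import DepManager

deps = DepManager()  # built once at import time and shared by every caller

def get_torch_or_raise():
    # repeated access re-runs importlib.import_module, which is a cheap
    # sys.modules lookup after the first import; failures surface as the
    # False tuple rather than an exception
    ok, msg, torch, _ = deps.torch
    if not ok:
        raise ImportError(msg)
    return torch
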
72 changes: 39 additions & 33 deletions graphistry/embed_utils.py
@@ -5,40 +5,36 @@

from .PlotterBase import Plottable
from .compute.ComputeMixin import ComputeMixin
from .dep_manager import DepManager


def lazy_embed_import_dep():
try:
import torch
import torch.nn as nn
import dgl
from dgl.dataloading import GraphDataLoader
import torch.nn.functional as F
from .networks import HeteroEmbed
from tqdm import trange
return True, torch, nn, dgl, GraphDataLoader, HeteroEmbed, F, trange

except:
return False, None, None, None, None, None, None, None

def check_cudf():
try:
import cudf
return True, cudf
except:
return False, object

# def lazy_embed_import_dep():
# try:
# import torch
# import torch.nn as nn
# import dgl
# from dgl.dataloading import GraphDataLoader
# import torch.nn.functional as F
# from .networks import HeteroEmbed
# from tqdm import trange
# return True, torch, nn, dgl, GraphDataLoader, HeteroEmbed, F, trange

# except:
# return False, None, None, None, None, None, None, None

deps = DepManager()

if TYPE_CHECKING:
_, torch, _, _, _, _, _, _ = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
TT = torch.Tensor
MIXIN_BASE = ComputeMixin
else:
TT = Any
MIXIN_BASE = object
torch = Any

has_cudf, cudf = check_cudf()

has_cudf, _, cudf, _ = deps.cudf

XSymbolic = Optional[Union[List[str], str, pd.DataFrame]]
ProtoSymbolic = Optional[Union[str, Callable[[TT, TT, TT], TT]]] # type: ignore
@@ -99,8 +95,7 @@ def __init__(self):
self._device = "cpu"

def _preprocess_embedding_data(self, res, train_split:Union[float, int] = 0.8) -> Plottable:
#_, torch, _, _, _, _, _, _ = lazy_embed_import_dep()
import torch
_, _, torch, _ = deps.torch
log('Preprocessing embedding data')
src, dst = res._source, res._destination
relation = res._relation
@@ -147,7 +142,7 @@ def _preprocess_embedding_data(self, res, train_split:Union[float, int] = 0.8) -
return res

def _build_graph(self, res) -> Plottable:
_, _, _, dgl, _, _, _, _ = lazy_embed_import_dep()
_, _, dgl, _ = deps.dgl
s, r, t = res._triplets.T

if res._train_idx is not None:
@@ -169,9 +164,11 @@


def _init_model(self, res, batch_size:int, sample_size:int, num_steps:int, device):
_, _, _, _, GraphDataLoader, HeteroEmbed, _, _ = lazy_embed_import_dep()
# _, _, _, _, GraphDataLoader, HeteroEmbed, _, _ = lazy_embed_import_dep()
# full dotted paths: dgl.dataloading.GraphDataLoader, graphistry.networks.HeteroEmbed
_, _, GraphDataLoader, _ = deps.dgl_dataloading_GraphDataLoader
_, _, HeteroEmbed, _ = deps.graphistry_networks_HeteroEmbed
g_iter = SubgraphIterator(res._kg_dgl, sample_size, num_steps)
g_dataloader = GraphDataLoader(
g_iter, batch_size=batch_size, collate_fn=lambda x: x[0]
)

@@ -188,7 +185,10 @@ def _init_model(self, res, batch_size:int, sample_size:int, num_steps:int, devic
return model, g_dataloader

def _train_embedding(self, res, epochs:int, batch_size:int, lr:float, sample_size:int, num_steps:int, device) -> Plottable:
_, torch, nn, _, _, _, _, trange = lazy_embed_import_dep()
# _, torch, nn, _, _, _, _, trange = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
_, _, nn, _ = deps.torch_nn
_, _, trange, _ = deps.tqdm_trange
log('Training embedding')
model, g_dataloader = res._init_model(res, batch_size, sample_size, num_steps, device)
if hasattr(res, "_embed_model") and not res._build_new_embedding_model:
@@ -232,7 +232,7 @@

@property
def _gcn_node_embeddings(self):
_, torch, _, _, _, _, _, _ = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
g_dgl = self._kg_dgl.to(self._device)
em = self._embed_model(g_dgl).detach()
torch.cuda.empty_cache()
@@ -540,7 +540,7 @@ def fetch_triplets_for_inference(x_r):


def _score(self, triplets: Union[np.ndarray, TT]) -> TT: # type: ignore
_, torch, _, _, _, _, _, _ = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
emb = self._kg_embeddings.clone().detach()
if not isinstance(triplets, torch.Tensor):
triplets = torch.tensor(triplets)
@@ -571,7 +571,13 @@ def __len__(self) -> int:
return self.num_steps

def __getitem__(self, i:int):
_, torch, nn, dgl, GraphDataLoader, _, F, _ = lazy_embed_import_dep()
# _, torch, nn, dgl, GraphDataLoader, _, F, _ = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
_, _, nn, _ = deps.torch_nn
_, _, dgl, _ = deps.dgl
_, _, GraphDataLoader, _ = deps.dgl_dataloading_GraphDataLoader
_, _, F, _ = deps.torch_nn_functional

eids = torch.from_numpy(np.random.choice(self.eids, self.sample_size))

src, dst = self.g.find_edges(eids)
@@ -593,7 +599,7 @@

@staticmethod
def _sample_neg(triplets:np.ndarray, num_nodes:int) -> Tuple[TT, TT]: # type: ignore
_, torch, _, _, _, _, _, _ = lazy_embed_import_dep()
_, _, torch, _ = deps.torch
triplets = torch.tensor(triplets)
h, r, t = triplets.T
h_o_t = torch.randint(high=2, size=h.size())
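A closing note on the underscore convention that __getitem__ above relies on: everything before the last underscore is joined with dots into the module path, and the final segment is the attribute fetched from it. A small sketch, assuming torch is installed:

from graphistry.dep_manager import DepManager

deps = DepManager()

_, _, torch, _ = deps.torch                # import torch
_, _, F, _ = deps.torch_nn_functional      # from torch.nn import functional as F
print(F.relu(torch.tensor([-1.0, 2.0])))   # tensor([0., 2.])

One caveat for review: because every underscore is treated as a dot, packages or attributes whose own names contain underscores cannot be addressed through this convention.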