diff --git a/hypernetx/hif.py b/hypernetx/hif.py
index 6d9539f3..1708acdf 100644
--- a/hypernetx/hif.py
+++ b/hypernetx/hif.py
@@ -29,22 +29,27 @@ def normalize_dataframe(df):
     pd.DataFrame
         allowed columns are limited to HIF keys
     """
-    default_cols = ['weight'] + list(set(df.columns).intersection(['direction'])) + ['misc_properties']
+    default_cols = (
+        ["weight"]
+        + list(set(df.columns).intersection(["direction"]))
+        + ["misc_properties"]
+    )
     cols = list(set(df.columns).difference(default_cols))
     dfdict = df[cols].T.to_dict()
     newdf = df[default_cols]
     for uid in newdf.index:
-        newdf.loc[uid]['misc_properties'].update(dfdict[uid])
-    return newdf.fillna('nil')
-
-def to_hif(hg,filename=None, network_type='undirected', metadata=None):
+        newdf.loc[uid]["misc_properties"].update(dfdict[uid])
+    return newdf.fillna("nil")
+
+
+def to_hif(hg, filename=None, network_type="undirected", metadata=None):
     """
     Returns a dictionary object valid for the HIF Json schema
 
     Parameters
     ----------
     hg : hnx.Hypergraph
-    
+
     filename : str, optional
         filepath where json object is to be stored, by default None
     network_type : str, optional
@@ -57,43 +62,50 @@ def to_hif(hg,filename=None, network_type='undirected', metadata=None):
     hif : dict
         format is defined by HIF schema
     """
-    hyp_objs = ['nodes','edges','incidences']
-    defaults = {part:dict(getattr(hg,part).property_store._defaults) for part in hyp_objs}
+    hyp_objs = ["nodes", "edges", "incidences"]
+    defaults = {
+        part: dict(getattr(hg, part).property_store._defaults) for part in hyp_objs
+    }
     for part in hyp_objs:
-        misc_properties = defaults[part].pop('misc_properties',{})
-        defaults[part]['attrs'] = dict(misc_properties)
-        
+        misc_properties = defaults[part].pop("misc_properties", {})
+        defaults[part]["attrs"] = dict(misc_properties)
+
     incj = deepcopy(hg.incidences.to_dataframe)
-    incj.index.names = ['edge','node']
+    incj.index.names = ["edge", "node"]
     incj = normalize_dataframe(incj)
-    incj = incj.rename(columns={"misc_properties":"attrs"})
+    incj = incj.rename(columns={"misc_properties": "attrs"})
     incj = incj.reset_index().to_dict(orient="records")
-    
+
     edgj = deepcopy(hg.edges.to_dataframe)
-    edgj.index.names = ['edge']
+    edgj.index.names = ["edge"]
     edgj = normalize_dataframe(edgj)
-    edgj = edgj.rename(columns={"misc_properties":"attrs"})
+    edgj = edgj.rename(columns={"misc_properties": "attrs"})
     edgj = edgj.reset_index().to_dict(orient="records")
-    
+
     nodj = deepcopy(hg.nodes.to_dataframe)
-    nodj.index.names = ['node']
+    nodj.index.names = ["node"]
     nodj = normalize_dataframe(nodj)
-    nodj = nodj.rename(columns={"misc_properties":"attrs"})
+    nodj = nodj.rename(columns={"misc_properties": "attrs"})
     nodj = nodj.reset_index().to_dict(orient="records")
 
-    if isinstance(metadata,dict):
-        metadata = metadata.update({'default_attrs':defaults})
+    if isinstance(metadata, dict):
+        metadata = metadata.update({"default_attrs": defaults})
     else:
-        metadata = {'default_attrs':defaults}
+        metadata = {"default_attrs": defaults}
     if hg.name is not None:
-        metadata['name'] = hg.name
-
-    hif = {"edges": edgj, "nodes": nodj, "incidences": incj, "network-type": network_type,
-           "metadata": metadata}
+        metadata["name"] = hg.name
+
+    hif = {
+        "edges": edgj,
+        "nodes": nodj,
+        "incidences": incj,
+        "network-type": network_type,
+        "metadata": metadata,
+    }
     try:
         validator(hif)
-        if filename is not None: 
-            json.dump(hif,open(filename,'w'))
+        if filename is not None:
+            json.dump(hif, open(filename, "w"))
         return hif
     except Exception as ex:
         HyperNetXError(ex)
@@ -101,7 +113,7 @@ def to_hif(hg,filename=None, network_type='undirected', metadata=None):
 
 def from_hif(hif=None, filename=None):
     """
-    Reads HIF formatted string or dictionary and returns corresponding 
+    Reads HIF formatted string or dictionary and returns corresponding
     hnx.Hypergraph
 
     Parameters
@@ -116,7 +128,7 @@ def from_hif(hif=None, filename=None):
     Returns
     -------
     hnx.Hypergraph
-    
+
     """
     if hif is not None:
         try:
@@ -125,48 +137,59 @@ def from_hif(hif=None, filename=None):
             HyperNetXError(ex)
             return None
     elif filename is not None:
-        hif = json.load(open(filename,'r'))
+        hif = json.load(open(filename, "r"))
         try:
             validator(hif)
         except Exception as ex:
             HyperNetXError(ex)
-            return None 
+            return None
     else:
-        print('No data given')
-
-    mkdd = lambda : {'weight':1, 'attrs':{}}
+        print("No data given")
+
+    mkdd = lambda: {"weight": 1, "attrs": {}}
     hifex = deepcopy(hif)
-    parts = {part:deepcopy(pd.DataFrame(hifex.get(part,{}))) for part in ['nodes','edges','incidences']}
-    metadata = hifex.get('metadata',{})
-    defaults = metadata.get('default_attrs',{})
-    defaults = {part: defaults.get(part,mkdd()) for part in parts}
+    parts = {
+        part: deepcopy(pd.DataFrame(hifex.get(part, {})))
+        for part in ["nodes", "edges", "incidences"]
+    }
+    metadata = hifex.get("metadata", {})
+    defaults = metadata.get("default_attrs", {})
+    defaults = {part: defaults.get(part, mkdd()) for part in parts}
     # cols = dict()
-    default_weights = {part:defaults[part].get('weight',1) for part in parts}
+    default_weights = {part: defaults[part].get("weight", 1) for part in parts}
     for part in parts:
         if len(part) == 0:
             continue
         thispart = parts[part]
         d = deepcopy(defaults[part])
-        dkeys = [k for k in d.keys() if k not in ['weight','attrs']]
+        dkeys = [k for k in d.keys() if k not in ["weight", "attrs"]]
         # cols[part] = ['weight'] + dkeys + ['attrs']
         if len(dkeys) > 0:
             for attr in dkeys:
-                thispart[attr] = [row.attrs.pop(attr,d[attr]) for row in thispart.itertuples()]
+                thispart[attr] = [
+                    row.attrs.pop(attr, d[attr]) for row in thispart.itertuples()
+                ]
     hyp_objects = dict()
-    for part in ['nodes','edges']:
+    for part in ["nodes", "edges"]:
        if len(parts[part]) > 0:
            uid = part[:-1]
            cols = [uid] + list(set(parts[part].columns).difference([uid]))
            hyp_objects[part] = parts[part][cols]
        else:
            hyp_objects[part] = None
-    cols = ['edge','node'] + list(set(parts['incidences'].columns).difference(['edge','node']))
-    incidences = parts['incidences'][cols]
-    name = metadata.get('name',None)
-    return hnx.Hypergraph(incidences, default_cell_weight=default_weights['incidences'],
-                          misc_cell_properties_col='attrs',
-                          node_properties=hyp_objects['nodes'], default_edge_weight=default_weights['edges'],
-                          edge_properties=hyp_objects['edges'], default_node_weight=default_weights['nodes'],
-                          misc_properties_col='attrs',
-                          name=name
-                          )
\ No newline at end of file
+    cols = ["edge", "node"] + list(
+        set(parts["incidences"].columns).difference(["edge", "node"])
+    )
+    incidences = parts["incidences"][cols]
+    name = metadata.get("name", None)
+    return hnx.Hypergraph(
+        incidences,
+        default_cell_weight=default_weights["incidences"],
+        misc_cell_properties_col="attrs",
+        node_properties=hyp_objects["nodes"],
+        default_edge_weight=default_weights["edges"],
+        edge_properties=hyp_objects["edges"],
+        default_node_weight=default_weights["nodes"],
+        misc_properties_col="attrs",
+        name=name,
+    )
diff --git a/hypernetx/utils/toys/lesmis.py b/hypernetx/utils/toys/lesmis.py
index cb8fd497..5c519471 100644
--- a/hypernetx/utils/toys/lesmis.py
+++ b/hypernetx/utils/toys/lesmis.py
@@ -42,7 +42,7 @@ def __init__(self):
     @property
     def dnames(self):
         return self.df_names.set_index("Symbol")
-    
+
     def hypergraph_example(self):
         names = self.df_names
 
@@ -62,11 +62,11 @@ def hypergraph_example(self):
             cell_weight_col="weight",
             node_properties=nprops,
             node_weight_prop_col="weight",
-            name="LesMis example from HNX"
+            name="LesMis example from HNX",
         )
-        lm.nodes['JV'].job = 'mayor'
-        lm.nodes['MY'].avocation = 'to be kind'
-        lm.nodes['BS'].vocation = 'explorer'
+        lm.nodes["JV"].job = "mayor"
+        lm.nodes["MY"].avocation = "to be kind"
+        lm.nodes["BS"].vocation = "explorer"
         return lm
 
 
@@ -80,6 +80,7 @@ def lesmis_hypergraph_from_df(df, by="Chapter", on="Characters"):
         }
     )
 
+
 def lesmis_hypergraph():
     lesmis = LesMis()
     names = lesmis.df_names
@@ -100,11 +101,10 @@ def lesmis_hypergraph():
         node_properties=nprops,
         node_weight_prop_col="weight",
     )
-    lm.nodes['JV'].job = 'mayor'
-    lm.nodes['MY'].avocation = 'to be kind'
-    lm.nodes['BS'].vocation = 'explorer'
+    lm.nodes["JV"].job = "mayor"
+    lm.nodes["MY"].avocation = "to be kind"
+    lm.nodes["BS"].vocation = "explorer"
    return lm
 
 
-
 def book_tour(df, xlabel="Book", ylabel="Volume", s=3.5):
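
For reference, a minimal round-trip sketch of the to_hif/from_hif API reformatted above. The module path hypernetx.hif and the function names come from this diff; the toy hypergraph and its edge/node names below are illustrative assumptions, not part of the change.

    import hypernetx as hnx
    from hypernetx.hif import to_hif, from_hif

    # hypothetical toy hypergraph: two named edges over four nodes
    H = hnx.Hypergraph({"e1": ["a", "b"], "e2": ["b", "c", "d"]}, name="toy")

    hif = to_hif(H)       # dict conforming to the HIF JSON schema
    H2 = from_hif(hif)    # rebuild an hnx.Hypergraph from that dict
    print(sorted(H2.nodes), sorted(H2.edges))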