From db5522a33ed83c26b52edbc50903956f807ed22b Mon Sep 17 00:00:00 2001 From: Caleb Schilly Date: Thu, 26 Sep 2024 12:28:28 -0400 Subject: [PATCH 1/2] #543: remove lbsVisualizer and lbsGridStreamer --- docs/docs_config.py | 4 - src/lbaf/IO/lbsGridStreamer.py | 157 --- src/lbaf/IO/lbsVisualizer.py | 898 ------------------ .../IO/test_lbs_visualizer_deprecation.py | 76 -- 4 files changed, 1135 deletions(-) delete mode 100644 src/lbaf/IO/lbsGridStreamer.py delete mode 100644 src/lbaf/IO/lbsVisualizer.py delete mode 100644 tests/unit/IO/test_lbs_visualizer_deprecation.py diff --git a/docs/docs_config.py b/docs/docs_config.py index 17660c9d..8bf08bef 100644 --- a/docs/docs_config.py +++ b/docs/docs_config.py @@ -30,8 +30,6 @@ # IO import lbaf.IO.lbsConfigurationValidator as ConfigurationValidator import lbaf.IO.lbsConfigurationUpgrader as configurationUpgrader -import lbaf.IO.lbsGridStreamer as GridStreamer -import lbaf.IO.lbsVisualizer as Visualizer import lbaf.IO.lbsStatistics as lbsStatistics import lbaf.IO.lbsVTDataReader as LoadReader import lbaf.IO.lbsVTDataWriter as VTDataWriter @@ -88,8 +86,6 @@ TemperedCriterion, JSONDataFilesValidator, ConfigurationValidator, - GridStreamer, - Visualizer, lbsStatistics, LoadReader, VTDataWriter, diff --git a/src/lbaf/IO/lbsGridStreamer.py b/src/lbaf/IO/lbsGridStreamer.py deleted file mode 100644 index bad40e63..00000000 --- a/src/lbaf/IO/lbsGridStreamer.py +++ /dev/null @@ -1,157 +0,0 @@ -# -#@HEADER -############################################################################### -# -# lbsGridStreamer.py -# DARMA/LB-analysis-framework => LB Analysis Framework -# -# Copyright 2019-2024 National Technology & Engineering Solutions of Sandia, LLC -# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. -# Government retains certain rights in this software. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from this -# software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Questions? 
Contact darma@sandia.gov -# -############################################################################### -#@HEADER -# -from typing import Optional - -try: - import vtk - using_vtk = True -except ModuleNotFoundError: - using_vtk = False - -from ..Utils.lbsLogging import Logger, get_logger - - -class GridStreamer: - """A class containing to stream a grid with time-varying attributes.""" - - def __init__( - self, - points, # vtk.vtkPoints - lines, # vtk.vtkCellArray - field_arrays: Optional[dict] = None, - point_arrays: Optional[list] = None, - cell_arrays: Optional[list] = None, - logger: Logger = None): - """Class constructor.""" - # Defaults - if field_arrays is None: - field_arrays = {} - if point_arrays is None: - point_arrays = [] - if cell_arrays is None: - cell_arrays = [] - - if not using_vtk: - raise ModuleNotFoundError("Could not find vtk module, which is required for the GridStreamer.") - - # Assign logger to instance variable - self.__logger = logger - - # Sanity checks - self.error = False - if not isinstance(points, vtk.vtkPoints): - self.__logger.error("A vtkPoints instance is required as points input") - self.error = True - return - if not isinstance(lines, vtk.vtkCellArray): - self.__logger.error("A vtkCellArray instance is required as lines input") - self.error = True - return - if not isinstance(field_arrays, dict): - self.__logger.error("A dict of vtkDataArray instances is required as field data input") - self.error = True - if not isinstance(point_arrays, list): - self.__logger.error("A list of dicts of vtkDataArray instances is required as point data input") - self.error = True - return - if not isinstance(cell_arrays, list): - self.__logger.error("A list of vtkDataArray instances is required as cell data input") - self.error = True - return - - # Keep track of requested number of steps and check consistency - if any([(n_steps := len(cell_arrays)) != len(point_arrays)]): - self.__logger.error(f"Number of point array dicts not all equal to {n_steps}") - self.error = True - return - - # More sanity checks - for f_name, f_list in field_arrays.items(): - if n_steps != len(f_list): - self.__logger.error(f"Number of {f_name} arrays and data arrays do not match: {len(f_list)} <> {n_steps}") - self.error = True - return - - # Instantiate the streaming source - self.__logger.info(f"Streaming {n_steps} load-balancing steps") - self.algorithm = vtk.vtkProgrammableSource() - - # Set source information - info = self.algorithm.GetExecutive().GetOutputInformation().GetInformationObject(0) - info.Set(vtk.vtkStreamingDemandDrivenPipeline.TIME_RANGE(), - [0, n_steps - 1], 2) - info.Set(vtk.vtkStreamingDemandDrivenPipeline.TIME_STEPS(), - range(n_steps), n_steps) - - # Implement RequestData() method for VTK pipeline - def request_data_method(): - # Retrieve information vector - info = self.algorithm.GetExecutive().GetOutputInformation().GetInformationObject(0) - - # Make the source is able to provide time steps - output = self.algorithm.GetPolyDataOutput() - t_s = info.Get(vtk.vtkStreamingDemandDrivenPipeline.UPDATE_TIME_STEP()) - output.GetInformation().Set(vtk.vtkDataObject.DATA_TIME_STEP(), t_s) - - # Assign geometry and topology of output - output.SetPoints(points) - output.SetLines(lines) - - # Assign topology and field data to output for timestep index - i = int(t_s) - for f_name, f_list in field_arrays.items(): - if n_steps != len(f_list): - get_logger().error( - f"Number of {f_name} arrays and data arrays do not match: {len(f_list)} <> {n_steps}") - self.error = True - return 
- output.GetFieldData().AddArray(f_list[i]) - - # Assign data attributes to output for time step index - for k, v in point_arrays[i].items(): - output.GetPointData().AddArray(v) - self.__logger.debug(f"Added {k} point array") - output.GetCellData().AddArray(cell_arrays[i]) - - # Set VTK RequestData() to programmable source - self.algorithm.SetExecuteMethod(request_data_method) diff --git a/src/lbaf/IO/lbsVisualizer.py b/src/lbaf/IO/lbsVisualizer.py deleted file mode 100644 index 88c0a0ca..00000000 --- a/src/lbaf/IO/lbsVisualizer.py +++ /dev/null @@ -1,898 +0,0 @@ -# -#@HEADER -############################################################################### -# -# lbsVisualizer.py -# DARMA/LB-analysis-framework => LB Analysis Framework -# -# Copyright 2019-2024 National Technology & Engineering Solutions of Sandia, LLC -# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. -# Government retains certain rights in this software. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from this -# software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Questions? Contact darma@sandia.gov -# -############################################################################### -#@HEADER -# -import math -import numbers -import os -import random -import sys -from logging import Logger - -try: - import matplotlib.pyplot as plt - import vtk - from .lbsGridStreamer import GridStreamer -except ModuleNotFoundError: - pass - -from ..Model.lbsPhase import Phase - - -class Visualizer: - """A class to visualize LBAF results via mesh files and VTK views. - - lbsVisualizer has been deprecated in favor of vt-tv. 
This module will be removed - in a future release.""" - - def __init__( - self, - logger: Logger, - qoi_request: list, - continuous_object_qoi: bool, - phases: list, - grid_size: list, - object_jitter=0.0, - output_dir='.', - output_file_stem="LBAF_out", - distributions=None, - statistics=None, - resolution=1.): - """Class constructor: - qoi_request: description of rank and object quantities of interest - continuous_object_qoi: always treat object QOI as continuous or not - phases: list of Phase instances - grid_size: iterable containing grid sizes in each dimension - object_jitter: coefficient of random jitter with magnitude < 1 - output_dir: output directory - output_file_stem: file name stem - distributions: a dictionary of per-phase QOI distributions - statistics: a dictionary of per-phase global statistics - resolution: grid_resolution value.""" - - # Throw deprecation error immediately - raise DeprecationWarning("LBAF's Visualizer has been deprecated and will be removed in a future release. Visualizations should be generated with DARMA/vt-tv.") - - # Assign logger to instance variable - self.__logger = logger - - # Useful fields - self.__rank_points = None - self.__rank_lines = None - self.__volumes = None - self.__field_data = None - - if not distributions: - distributions = {} - - if not statistics: - distributions = {} - - # Make sure that rank quantity of interest name was passed - if not isinstance(qoi_request, list) or (l_req := len(qoi_request)) != 3: - self.__logger.error( - f"Visualizer expects 3 quantity of interest parameters and not {l_req}") - raise SystemExit(1) - if not (rank_qoi := qoi_request[0]) or not isinstance(rank_qoi, str): - self.__logger.error( - "Visualizer expects a non-empty rank quantity of interest name") - raise SystemExit(1) - self.__rank_qoi = rank_qoi - - # When rank QOI range was passed make sure it is consistent - rank_qoi_max = qoi_request[1] - if rank_qoi_max is not None: - if not isinstance(rank_qoi_max, float): - self.__logger.error( - f"Inconsistent quantity of interest maximum: {rank_qoi_max}") - raise SystemExit(1) - - # When object QOI name was passed make sure it is consistent - req_str = f"Creating visualization for rank {self.__rank_qoi}" - if (object_qoi := qoi_request[2]) and not isinstance(object_qoi, str): - self.__logger.error( - "Optional object quantity of interest name must be a string") - raise SystemExit(1) - if object_qoi: - self.__object_qoi = object_qoi - req_str += f" and object {self.__object_qoi}" - else: - self.__object_qoi = None - self.__logger.info(req_str) - - # Make sure that Phase instances were passed - if not all([isinstance(p, Phase) for p in phases.values()]): - self.__logger.error( - "Visualizer expects a dictionary of phases as input") - raise SystemExit(1) - self.__phases = phases - - # Ensure that all phases have the same number of ranks - n_r = next(iter(phases.values())).get_number_of_ranks() - if not all([p.get_number_of_ranks() == n_r for p in phases.values()]): - self.__logger.error( - f"All phases must have {n_r} ranks as the first one") - raise SystemExit(1) - self.__n_ranks = n_r - - # Ensure that specified grid resolution is correct - if not isinstance(resolution, numbers.Number) or resolution <= 0.: - self.__logger.error( - "Grid resolution must be a positive number") - raise SystemExit(1) - self.__grid_resolution = float(resolution) - - # Determine available dimensions for object placement in ranks - self.__grid_size = grid_size - self.__rank_dims = [ - d for d in range(3) if self.__grid_size[d] 
> 1] - self.__max_o_per_dim = 0 - - # Compute constant per object jitter - self.__jitter_dims = { - i: [(random.random() - 0.5) * object_jitter - if d in self.__rank_dims - else 0.0 for d in range(3)] - for i in next(iter(self.__phases.values())).get_object_ids()} - - # Initialize maximum object atrribute values - self.__object_load_max = 0.0 - self.__object_volume_max = 0.0 - - # Compute discrete or pseudo-continuous object QOI range - self.__object_qoi_range = self.compute_object_qoi_range( - object_qoi, continuous_object_qoi) - - # Assemble file and path names from constructor parameters - self.__rank_file_name = f"{output_file_stem}_rank_view.e" - self.__object_file_name = f"{output_file_stem}_object_view" - if output_dir is not None: - self.__rank_file_name = os.path.join( - output_dir, self.__rank_file_name) - self.__object_file_name = os.path.join( - output_dir, self.__object_file_name) - self.__visualization_file_name = os.path.join( - output_dir, output_file_stem) - - # Retrieve and verify rank attribute distributions - self.__rank_attributes = { - k: distributions.get(f"rank {k}", []) - for k in list({"load", "work", self.__rank_qoi})} - if not all((n_dis := len(self.__rank_attributes["load"])) == len(v) - for v in self.__rank_attributes.values()): - self.__logger.error( - "Rank attribute distributions do not have equal lengths") - raise SystemExit(1) - self.__distributions = distributions - - # Retrieve and verify globale statistics - if not isinstance(statistics, dict): - self.__logger.error( - "Global statistics must be passed in a dictionary") - raise SystemExit(1) - self.__statistics = statistics - - # Assign or compute rank quantity of interest range - self.__rank_qoi_range = [ - min(y for x in self.__rank_attributes[self.__rank_qoi] for y in x)] - if rank_qoi_max is None: - self.__rank_qoi_range.append( - max(y for x in self.__rank_attributes[self.__rank_qoi] for y in x)) - else: - self.__rank_qoi_range.append(rank_qoi_max) - self.__logger.info( - f"\trank {self.__rank_qoi} range: [{self.__rank_qoi_range[0]:.4g}; {self.__rank_qoi_range[1]:.4g}]") - - # Create attribute data arrays for rank loads and works - self.__logger.info( - "Adding attributes " + ", ".join(self.__rank_attributes)) - self.__qoi_dicts = [] - for _ in range(n_dis): - # Create and append new rank QOI dictionaries - arr_dict = {} - self.__qoi_dicts.append(arr_dict) - for k in self.__rank_attributes.keys(): - qoi_arr = vtk.vtkDoubleArray() - qoi_arr.SetName(k) - qoi_arr.SetNumberOfTuples(self.__n_ranks) - arr_dict[k] = qoi_arr - - @staticmethod - def global_id_to_cartesian(flat_id, grid_sizes): - """Map global index to its Cartesian grid coordinates.""" - # Sanity check - n01 = grid_sizes[0] * grid_sizes[1] - if flat_id < 0 or flat_id >= n01 * grid_sizes[2]: - return None, None, None - - # Compute successive Euclidean divisions - k, r = divmod(flat_id, n01) - j, i = divmod(r, grid_sizes[0]) - - # Return Cartesian coordinates - return i, j, k - - def compute_object_qoi_range(self, object_qoi, continuous_object_qoi): - """Decide object quantity storage type and compute it.""" - - # Return empty range if no object QOI was passed - if not object_qoi: - return () - - # Initialize space-time object QOI range attributes - oq_min, oq_max, oq_all, = math.inf, -math.inf, set() - - # Iterate over all phases - for phase in self.__phases.values(): - # Iterate over all objects in phase - for o in phase.get_objects(): - # Update maximum object load as needed - if (ol := o.get_load()) > self.__object_load_max: - 
self.__object_load_max = ol - - # Retain all QOI values while support remains small - oq = getattr(o, f"get_{object_qoi}")() - - # Check if the QOI value ends in .0, then convert to integer - if isinstance(oq, float) and oq.is_integer(): - oq = int(oq) - - if not continuous_object_qoi: - oq_all.add(oq) - if len(oq_all) > 20: - # Do not store QOI values if support is too large - oq_all = None - continuous_object_qoi = True - - # Update extrema - if oq < oq_min: - oq_min = oq - if oq > oq_max: - oq_max = oq - - # Store either range or support - if continuous_object_qoi: - object_qoi_range = (oq_min, oq_max) - self.__logger.info( - f"\tobject {self.__object_qoi} range: [{object_qoi_range[0]:.4g}; {object_qoi_range[1]:.4g}]") - else: - object_qoi_range = oq_all - self.__logger.info( - f"\tobject {self.__object_qoi} has {len(object_qoi_range)} distinct values") - - # Return cpmputed QOI range - return object_qoi_range - - def __create_rank_mesh(self, iteration: int): - """Map ranks to polygonal mesh.""" - # Assemble and return polygonal mesh - pd_mesh = vtk.vtkPolyData() - pd_mesh.SetPoints(self.__rank_points) - pd_mesh.SetLines(self.__rank_lines) - pd_mesh.GetPointData().SetScalars( - self.__qoi_dicts[iteration][self.__rank_qoi]) - return pd_mesh - - def __create_object_mesh(self, phase: Phase, object_mapping: set): - """Map objects to polygonal mesh.""" - # Retrieve number of mesh points and bail out early if empty set - n_o = phase.get_number_of_objects() - if not n_o: - self.__logger.warning("Empty list of objects, cannot write a mesh file") - return - - # Create point array for object quantity of interest - q_arr = vtk.vtkDoubleArray() - q_arr.SetName(self.__object_qoi) - q_arr.SetNumberOfTuples(n_o) - - # Load array must be added when it is not the object QOI - if self.__object_qoi != "load": - l_arr = vtk.vtkDoubleArray() - l_arr.SetName("load") - l_arr.SetNumberOfTuples(n_o) - else: - l_arr = None - - # Create bit array for object migratability - b_arr = vtk.vtkBitArray() - b_arr.SetName("migratable") - b_arr.SetNumberOfTuples(n_o) - - # Create and size point set - points = vtk.vtkPoints() - points.SetNumberOfPoints(n_o) - - # Retrieve elements constant across all ranks - p_id = phase.get_id() - ranks = phase.get_ranks() - object_qoi = self.__distributions[f"object {self.__object_qoi}"][p_id] - - # Iterate over ranks and objects to create mesh points - point_index, point_to_index, sent_volumes = 0, {}, [] - for rank_id, objects in enumerate(object_mapping): - # Determine rank offsets - offsets = [ - self.__grid_resolution * c - for c in self.global_id_to_cartesian( - rank_id, self.__grid_size)] - - # Compute local object block parameters - n_o_rank = len(objects) - n_o_per_dim = math.ceil(n_o_rank ** ( - 1. / len(self.__rank_dims))) - if n_o_per_dim > self.__max_o_per_dim: - self.__max_o_per_dim = n_o_per_dim - o_resolution = self.__grid_resolution / (n_o_per_dim + 1.) - - # Iterate over objects and create point coordinates - self.__logger.debug( - f"Arranging a maximum of {n_o_per_dim} objects per dimension in {self.__rank_dims}") - rank_size = [n_o_per_dim - if d in self.__rank_dims - else 1 for d in range(3)] - centering = [0.5 * o_resolution * (n_o_per_dim - 1.) 
- if d in self.__rank_dims - else 0.0 for d in range(3)] - - # Order objects of current rank - r = ranks[rank_id] - objects_list = sorted(objects, key=lambda x: x.get_id()) - ordered_objects = {o: 0 for o in objects_list if r.is_sentinel(o)} - for o in objects_list: - if not r.is_sentinel(o): - ordered_objects[o] = 1 - - # Add rank objects to points set - for i, (o, m) in enumerate(ordered_objects.items()): - # Insert point using offset and rank coordinates - points.SetPoint(point_index, [ - offsets[d] - centering[d] + ( - self.__jitter_dims.get( - o.get_id(), - (0.0, 0.0, 0.0))[d] + c) * o_resolution - for d, c in enumerate(self.global_id_to_cartesian( - i, rank_size))]) - - # Set object attributes - q_arr.SetTuple1(point_index, object_qoi[o.get_id()]) - b_arr.SetTuple1(point_index, m) - if l_arr: - l_arr.SetTuple1(point_index, o.get_load()) - - # Update sent volumes - for k, v in o.get_sent().items(): - sent_volumes.append((point_index, k, v)) - - # Update maps and counters - point_to_index[o] = point_index - point_index += 1 - - # Initialize containers for edge lines and attribute - v_arr = vtk.vtkDoubleArray() - v_arr.SetName("volume") - lines = vtk.vtkCellArray() - n_e, edge_values = 0, {} - - # Create object mesh edges and assign volume values - self.__logger.debug("\tCreating inter-object communication edges:") - for pt_index, k, v in sent_volumes: - # Retrieve undirected edge point indices - i, j = sorted((pt_index, point_to_index[k])) - ij = frozenset((i, j)) - - # Update or create edge - if (e_ij := edge_values.get(ij)) is None: - # Edge must be created - self.__logger.debug(f"\tcreating edge {n_e} ({i}--{j}): {v}") - edge_values[ij] = [n_e, v] - n_e += 1 - v_arr.InsertNextTuple1(v) - line = vtk.vtkLine() - line.GetPointIds().SetId(0, i) - line.GetPointIds().SetId(1, j) - lines.InsertNextCell(line) - else: - # Edge already exists and must be updated - e_ij[1] += v - self.__logger.debug(f"\tupdating edge {e_ij[0]} ({i}--{j}): {e_ij[1]}") - v_arr.SetTuple1(e_ij[0], e_ij[1]) - - # Create and return VTK polygonal data mesh - self.__logger.info( - f"Assembling phase {p_id} object mesh with {n_o} points and {n_e} edges") - pd_mesh = vtk.vtkPolyData() - pd_mesh.SetPoints(points) - pd_mesh.SetLines(lines) - pd_mesh.GetPointData().SetScalars(q_arr) - pd_mesh.GetPointData().AddArray(b_arr) - if l_arr: - pd_mesh.GetPointData().AddArray(l_arr) - pd_mesh.GetCellData().SetScalars(v_arr) - return pd_mesh - - @staticmethod - def create_color_transfer_function(attribute_range, scheme=None): - """Create a color transfer function given attribute range.""" - - # Create dicretizable color transfer function - ctf = vtk.vtkDiscretizableColorTransferFunction() - ctf.SetNanColorRGBA(1., 1., 1., 0.) 
- ctf.UseBelowRangeColorOn() - ctf.UseAboveRangeColorOn() - - # Make discrete when requested - if isinstance(attribute_range, set): - ctf.DiscretizeOn() - n_colors = len(attribute_range) - ctf.IndexedLookupOn() - ctf.SetNumberOfIndexedColors(n_colors) - for i, v in enumerate(sorted(attribute_range)): - ctf.SetAnnotation(v, f"{v}") - ctf.SetIndexedColorRGBA(i, plt.cm.get_cmap("tab20")(i)) - ctf.Build() - return ctf - - # Otherwise set color transfer function depending on chosen scheme - if scheme == "blue_to_red": - ctf.SetColorSpaceToDiverging() - mid_point = (attribute_range[0] + attribute_range[1]) * .5 - ctf.AddRGBPoint(attribute_range[0], .231, .298, .753) - ctf.AddRGBPoint(mid_point, .865, .865, .865) - ctf.AddRGBPoint(attribute_range[1], .906, .016, .109) - ctf.SetBelowRangeColor(0.0, 1.0, 0.0) - ctf.SetAboveRangeColor(1.0, 0.0, 1.0) - elif scheme == "white_to_black": - ctf.AddRGBPoint(attribute_range[0], 1.0, 1.0, 1.0) - ctf.AddRGBPoint(attribute_range[1], 0.0, 0.0, 0.0) - ctf.SetBelowRangeColor(0.0, 0.0, 1.0) - ctf.SetAboveRangeColor(1.0, 0.0, 0.0) - else: - # Default color spectrum from green to orange via yellow - mid_point = (attribute_range[0] + attribute_range[1]) * .5 - ctf.AddRGBPoint(attribute_range[0], .431, .761, .161) - ctf.AddRGBPoint(mid_point, .98, .992, .059) - ctf.AddRGBPoint(attribute_range[1], 1.0, .647, 0.0) - ctf.SetBelowRangeColor(0.8, 0.8, .8) - ctf.SetAboveRangeColor(1.0, 0.0, 1.0) - - # Return color transfer function - return ctf - - @staticmethod - def create_scalar_bar_actor(mapper, title, x, y, values=None): - """Create scalar bar with default and custom parameters.""" - - # Instantiate scalar bar linked to given mapper - scalar_bar_actor = vtk.vtkScalarBarActor() - scalar_bar_actor.SetLookupTable(mapper.GetLookupTable()) - - # Set default parameters - scalar_bar_actor.SetOrientationToHorizontal() - scalar_bar_actor.UnconstrainedFontSizeOn() - scalar_bar_actor.SetHeight(0.08) - scalar_bar_actor.SetWidth(0.42) - scalar_bar_actor.SetBarRatio(0.3) - scalar_bar_actor.DrawTickLabelsOn() - scalar_bar_actor.SetLabelFormat("%.2G") - if values: - scalar_bar_actor.SetNumberOfLabels(len(values)) - scalar_bar_actor.SetAnnotationLeaderPadding(8) - scalar_bar_actor.SetTitle(title.title().replace('_', ' ') + '\n') - else: - scalar_bar_actor.SetNumberOfLabels(2) - scalar_bar_actor.SetTitle(title.title().replace('_', ' ')) - for text_prop in ( - scalar_bar_actor.GetTitleTextProperty(), - scalar_bar_actor.GetLabelTextProperty(), - scalar_bar_actor.GetAnnotationTextProperty()): - text_prop.SetColor(0.0, 0.0, 0.0) - text_prop.ItalicOff() - text_prop.BoldOff() - text_prop.SetFontFamilyToArial() - text_prop.SetFontSize(60) - - # Set custom parameters - position = scalar_bar_actor.GetPositionCoordinate() - position.SetCoordinateSystemToNormalizedViewport() - position.SetValue(x, y, 0.0) - - # Return created scalar bar actor - return scalar_bar_actor - - def __create_rendering_pipeline( - self, - iteration: int, - p_id: int, - object_mesh, - edge_width: int, - glyph_factor: float, - win_size: int): - """Create VTK-based pipeline all the way to render window.""" - - # Create rank mesh for current phase - rank_mesh = self.__create_rank_mesh(iteration) - - # Create renderer with parallel projection - renderer = vtk.vtkRenderer() - renderer.SetBackground(1.0, 1.0, 1.0) - renderer.GetActiveCamera().ParallelProjectionOn() - - # Create square glyphs at ranks - rank_glyph = vtk.vtkGlyphSource2D() - rank_glyph.SetGlyphTypeToSquare() - rank_glyph.SetScale(.95) - rank_glyph.FilledOn() - 
rank_glyph.CrossOff() - rank_glypher = vtk.vtkGlyph2D() - rank_glypher.SetSourceConnection(rank_glyph.GetOutputPort()) - rank_glypher.SetInputData(rank_mesh) - rank_glypher.SetScaleModeToDataScalingOff() - - # Lower glyphs slightly for visibility - z_lower = vtk.vtkTransform() - z_lower.Translate(0.0, 0.0, -0.01) - trans = vtk.vtkTransformPolyDataFilter() - trans.SetTransform(z_lower) - trans.SetInputConnection(rank_glypher.GetOutputPort()) - - # Create mapper for rank glyphs - rank_mapper = vtk.vtkPolyDataMapper() - rank_mapper.SetInputConnection(trans.GetOutputPort()) - rank_mapper.SetLookupTable( - self.create_color_transfer_function(( - self.__rank_qoi_range[0], self.__rank_qoi_range[1]),"blue_to_red")) - rank_mapper.SetScalarRange(self.__rank_qoi_range) - - # Create rank QOI and its scalar bar actors - rank_actor = vtk.vtkActor() - rank_actor.SetMapper(rank_mapper) - qoi_actor = self.create_scalar_bar_actor( - rank_mapper, f"rank {self.__rank_qoi}", 0.5, 0.9) - qoi_actor.DrawBelowRangeSwatchOn() - qoi_actor.SetBelowRangeAnnotation('<') - qoi_actor.DrawAboveRangeSwatchOn() - qoi_actor.SetAboveRangeAnnotation('>') - renderer.AddActor(rank_actor) - renderer.AddActor2D(qoi_actor) - - # Create object pipeline only when requested - if self.__object_qoi: - # Create white to black look-up table - bw_lut = vtk.vtkLookupTable() - bw_lut.SetTableRange((0.0, self.__object_volume_max)) - bw_lut.SetSaturationRange(0, 0) - bw_lut.SetHueRange(0, 0) - bw_lut.SetValueRange(1, 0) - bw_lut.SetNanColor(1.0, 1.0, 1.0, 0.0) - bw_lut.Build() - - # Create mapper for inter-object edges - edge_mapper = vtk.vtkPolyDataMapper() - edge_mapper.SetInputData(object_mesh) - edge_mapper.SetScalarModeToUseCellData() - edge_mapper.SetScalarRange((0.0, self.__object_volume_max)) - edge_mapper.SetLookupTable(bw_lut) - - # Create communication volume and its scalar bar actors - edge_actor = vtk.vtkActor() - edge_actor.SetMapper(edge_mapper) - edge_actor.GetProperty().SetLineWidth(edge_width) - volume_actor = self.create_scalar_bar_actor( - edge_mapper, "Inter-Object Volume", 0.04, 0.04) - renderer.AddActor(edge_actor) - renderer.AddActor2D(volume_actor) - - # Compute square root of object loads - sqrtL = vtk.vtkArrayCalculator() - sqrtL.SetInputData(object_mesh) - sqrtL.AddScalarArrayName("load") - sqrtL_str = "sqrt(load)" - sqrtL.SetFunction(sqrtL_str) - sqrtL.SetResultArrayName(sqrtL_str) - sqrtL.Update() - sqrtL_out = sqrtL.GetOutput() - sqrtL_out.GetPointData().SetActiveScalars("migratable") - - # Glyph sentinel and migratable objects separately - glyph_mapper = None - for k, v in {0.0: "Square", 1.0: "Circle"}.items(): - # Threshold by migratable status - thresh = vtk.vtkThresholdPoints() - thresh.SetInputData(sqrtL_out) - thresh.ThresholdBetween(k, k) - thresh.Update() - thresh_out = thresh.GetOutput() - if not thresh_out.GetNumberOfPoints(): - continue - thresh_out.GetPointData().SetActiveScalars(sqrtL_str) - - # Glyph by square root of object quantity of interest - glyph = vtk.vtkGlyphSource2D() - getattr(glyph, f"SetGlyphTypeTo{v}")() - glyph.SetResolution(32) - glyph.SetScale(1.0) - glyph.FilledOn() - glyph.CrossOff() - glypher = vtk.vtkGlyph3D() - glypher.SetSourceConnection(glyph.GetOutputPort()) - glypher.SetInputData(thresh_out) - glypher.SetScaleModeToScaleByScalar() - glypher.SetScaleFactor(glyph_factor) - glypher.Update() - glypher.GetOutput().GetPointData().SetActiveScalars( - self.__object_qoi) - - # Raise glyphs slightly for visibility - z_raise = vtk.vtkTransform() - z_raise.Translate(0.0, 0.0, 0.01) - 
trans = vtk.vtkTransformPolyDataFilter() - trans.SetTransform(z_raise) - trans.SetInputData(glypher.GetOutput()) - - # Create mapper and actor for glyphs - glyph_mapper = vtk.vtkPolyDataMapper() - glyph_mapper.SetInputConnection(trans.GetOutputPort()) - glyph_mapper.SetLookupTable( - self.create_color_transfer_function( - self.__object_qoi_range)) - if (is_continuous := isinstance(self.__object_qoi_range, tuple)): - glyph_mapper.SetScalarRange(self.__object_qoi_range) - glyph_actor = vtk.vtkActor() - glyph_actor.SetMapper(glyph_mapper) - renderer.AddActor(glyph_actor) - - # Create and add unique scalar bar for object QOI when available - if glyph_mapper: - load_actor = self.create_scalar_bar_actor( - glyph_mapper, f"object {self.__object_qoi}", 0.52, 0.04, - None if is_continuous else self.__object_qoi_range) - renderer.AddActor2D(load_actor) - - # Create text actor to indicate iteration and imbalance - lb_data = self.__field_data["load imbalance"] - text_actor = vtk.vtkTextActor() - text_actor.SetInput( - f"Phase ID: {p_id}" - f" Iteration: {iteration}/{len(lb_data) - 1}\n" - f"Load Imbalance: {lb_data[iteration].GetTuple1(0):.4g}") - text_prop = text_actor.GetTextProperty() - text_prop.SetColor(0.0, 0.0, 0.0) - text_prop.ItalicOff() - text_prop.BoldOff() - text_prop.SetFontFamilyToArial() - text_prop.SetFontSize(60) - text_prop.SetLineSpacing(1.5) - position = text_actor.GetPositionCoordinate() - position.SetCoordinateSystemToNormalizedViewport() - position.SetValue(0.04, 0.91, 0.0) - renderer.AddActor(text_actor) - - # Create and return render window - renderer.ResetCamera() - render_window = vtk.vtkRenderWindow() - render_window.AddRenderer(renderer) - render_window.SetWindowName("LBAF") - render_window.SetSize(win_size, win_size) - return render_window - - def generate(self, save_meshes: bool, gen_vizqoi: bool): - """Generate mesh and multimedia outputs.""" - - # Iterate over ranks and create rank mesh points - self.__rank_points = vtk.vtkPoints() - self.__rank_points.SetNumberOfPoints(self.__n_ranks) - for i in range(self.__n_ranks): - # Insert point based on Cartesian coordinates - self.__rank_points.SetPoint(i, [ - self.__grid_resolution * c - for c in self.global_id_to_cartesian( - i, self.__grid_size)]) - - # Set point attributes from distribution values - for j, qoi_dict in enumerate(self.__qoi_dicts): - for k, v in self.__rank_attributes.items(): - qoi_dict[k].SetTuple1(i, v[j][i]) - - # Iterate over all possible rank links and create edges - self.__rank_lines = vtk.vtkCellArray() - index_to_edge = {} - edge_index = 0 - for i in range(self.__n_ranks): - for j in range(i + 1, self.__n_ranks): - # Insert new link based on endpoint indices - line = vtk.vtkLine() - line.GetPointIds().SetId(0, i) - line.GetPointIds().SetId(1, j) - self.__rank_lines.InsertNextCell(line) - - # Update flat index map - index_to_edge[edge_index] = frozenset([i, j]) - edge_index += 1 - - # Number of edges is fixed due to vtkExodusIIWriter limitation - n_e = int(self.__n_ranks * (self.__n_ranks - 1) / 2) - self.__logger.debug( - f"Assembling rank mesh with {self.__n_ranks} points and {n_e} edges") - - # Create attribute data arrays for edge sent volumes - self.__volumes = [] - for i, sent in enumerate(self.__distributions["sent"]): - # Reduce directed edges into undirected ones - u_edges = {} - for k, v in sent.items(): - u_edges[frozenset(k)] = u_edges.setdefault(frozenset(k), 0.) 
+ v - - # Create and append new volume array for edges - v_arr = vtk.vtkDoubleArray() - v_arr.SetName("largest directed volume") - v_arr.SetNumberOfTuples(n_e) - self.__volumes.append(v_arr) - - # Assign edge volume values - self.__logger.debug(f"\titeration {i} edges:") - for e, edge in index_to_edge.items(): - v = u_edges.get(edge, float("nan")) - v_arr.SetTuple1(e, v) - if v > self.__object_volume_max: - self.__object_volume_max = v - self.__logger.debug(f"\t{e} {edge}): {v}") - - # Create and populate field arrays for statistics - self.__field_data = {} - for stat_name, stat_values in self.__statistics.items(): - # Skip non-list entries - if not isinstance(stat_values, list): - continue - - # Create one singleton for each value of each statistic - for v in stat_values: - s_arr = vtk.vtkDoubleArray() - s_arr.SetNumberOfTuples(1) - s_arr.SetTuple1(0, v) - s_arr.SetName(stat_name) - self.__field_data.setdefault(stat_name, []).append(s_arr) - - # Write ExodusII rank mesh when requested - if save_meshes: - if sys.version_info.major == 3 and sys.version_info.minor == 9: - self.__logger.error( - "Cannot save meshes when using Python 3.9 (issue with vtk 9.1.0). " - "Please use Python 3.8 (vtk 9.0.1)." - ) - raise SystemExit(1) - - # Create grid streamer - streamer = GridStreamer( - self.__rank_points, - self.__rank_lines, - self.__field_data, - self.__qoi_dicts, - self.__volumes, - logger=self.__logger) - - # Write to ExodusII file when possible - if streamer.error: - self.__logger.warning( - f"Failed to instantiate a grid streamer for file {self.__rank_file_name}") - else: - writer = vtk.vtkExodusIIWriter() - writer.SetFileName(self.__rank_file_name) - writer.SetInputConnection(streamer.algorithm.GetOutputPort()) - writer.WriteAllTimeStepsOn() - writer.Update() - self.__logger.info( - f"Wrote ExodusII file: {self.__rank_file_name}") - - # Determine whether phase must be updated - update_phase = True if len( - rank_objects := self.__distributions.get("rank objects", set()) - ) == len(self.__phases) else False - - # Iterate over all object distributions - phase = next(iter(self.__phases.values())) - phase_it = iter(self.__phases.values()) - for iteration, object_mapping in enumerate(rank_objects): - # Update phase when required - if update_phase: - phase = next(phase_it) - - # Create object mesh when requested - if self.__object_qoi: - object_mesh = self.__create_object_mesh(phase, object_mapping) - - # Write to VTP file when requested - if save_meshes: - file_name = f"{self.__object_file_name}_{iteration:02d}.vtp" - writer = vtk.vtkXMLPolyDataWriter() - writer.SetFileName(file_name) - writer.SetInputData(object_mesh) - writer.Update() - self.__logger.info(f"Wrote VTP file: {file_name}") - else: - object_mesh = None - - # Generate visualizations when requested - if gen_vizqoi: - if len(self.__rank_dims) > 2: - self.__logger.warning( - "Visualization generation not yet implemented in 3-D") - continue - - # Compute visualization parameters - self.__logger.info( - f"Generating 2-D visualization for iteration {iteration}:") - ws = 800 - self.__logger.info( - f"\tnumber of pixels: {ws}x{ws}") - if self.__object_qoi: - ew = 0.1 * ws / max(self.__grid_size) - self.__logger.info( - f"\tcommunication edges width: {ew:.2g}") - gf = 0.8 * self.__grid_resolution / ( - (self.__max_o_per_dim + 1) - * math.sqrt(self.__object_load_max)) - self.__logger.info( - f"\tobject glyphs scaling: {gf:.2g}") - - # Run visualization pipeline - render_window = self.__create_rendering_pipeline( - iteration, - 
phase.get_id(), - object_mesh, - edge_width = ew if self.__object_qoi else None, - glyph_factor = gf if self.__object_qoi else None, - win_size = ws) - render_window.Render() - - # Convert window to image - w2i = vtk.vtkWindowToImageFilter() - w2i.SetInput(render_window) - w2i.SetScale(3) - - # Output PNG file - file_name = f"{self.__visualization_file_name}_{iteration:02d}.png" - writer = vtk.vtkPNGWriter() - writer.SetInputConnection(w2i.GetOutputPort()) - writer.SetFileName(file_name) - writer.SetCompressionLevel(2) - writer.Write() - self.__logger.info(f"Wrote PNG file: {file_name}") diff --git a/tests/unit/IO/test_lbs_visualizer_deprecation.py b/tests/unit/IO/test_lbs_visualizer_deprecation.py deleted file mode 100644 index eb6b8d68..00000000 --- a/tests/unit/IO/test_lbs_visualizer_deprecation.py +++ /dev/null @@ -1,76 +0,0 @@ -# -#@HEADER -############################################################################### -# -# test_lbs_visualizer_deprecation.py -# DARMA/LB-analysis-framework => LB Analysis Framework -# -# Copyright 2019-2024 National Technology & Engineering Solutions of Sandia, LLC -# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. -# Government retains certain rights in this software. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from this -# software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Questions? Contact darma@sandia.gov -# -############################################################################### -#@HEADER -# -import os -import logging -import unittest -import subprocess -import importlib -from src.lbaf.IO.lbsVisualizer import Visualizer - -class TestVizDeprecation(unittest.TestCase): - """Test for lbsVisualizer's deprecation.""" - - def test_lbs_visualizer_deprecation(self): - try: - visualizer = Visualizer( - logger=logging.getLogger(), - qoi_request=["this", "that", "the other thing"], - continuous_object_qoi=False, - phases=["phase 1", "phase 2"], - grid_size=[0,0,1,1]) - except DeprecationWarning as e: - assert str(e) == "LBAF's Visualizer has been deprecated and will be removed in a future release. Visualizations should be generated with DARMA/vt-tv." 
- - def test_lbs_visualizer_config(self): - config_file = os.path.join(os.path.dirname(os.path.dirname(__file__)), "config", "conf_wrong_visualization.yml") - pipes = subprocess.Popen(["python", "src/lbaf", "-c", config_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - std_err = pipes.communicate()[1].decode("utf-8") - vttv = importlib.util.find_spec('vttv') - if vttv is None: - assert "Visualization enabled but vt-tv module not found." in std_err - else: - assert "Visualization enabled but vt-tv module not found." not in std_err - assert pipes.returncode != 0 # error because missing json parameters required to run vt tv - -if __name__ == "__main__": - unittest.main() From a7e5b632e6a26c0a39319fd9bbfd807c3ee029db Mon Sep 17 00:00:00 2001 From: Caleb Schilly Date: Thu, 26 Sep 2024 14:15:36 -0400 Subject: [PATCH 2/2] #543: remove MoveCountsViewer and fix errors in documentation of dependencies --- .pylintrc | 2 +- docs/pages/dependencies.rst | 12 +- setup.cfg | 3 +- src/lbaf/Applications/MoveCountsViewer.py | 407 ---------------------- src/lbaf/__init__.py | 5 - 5 files changed, 8 insertions(+), 421 deletions(-) delete mode 100644 src/lbaf/Applications/MoveCountsViewer.py diff --git a/.pylintrc b/.pylintrc index b78f3aac..0c4db53c 100644 --- a/.pylintrc +++ b/.pylintrc @@ -408,7 +408,7 @@ ignored-classes=optparse.Values,thread._local,_thread._local,lbaf.IO.lbsStatisti # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis). It # supports qualified module names, as well as Unix pattern matching. -ignored-modules=vtk +ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. 
diff --git a/docs/pages/dependencies.rst b/docs/pages/dependencies.rst index 06db227a..718fba66 100644 --- a/docs/pages/dependencies.rst +++ b/docs/pages/dependencies.rst @@ -37,15 +37,15 @@ Testing dependencies * - Package - License and Description * - `tox `__ - - BSD License (BSD) **VTK is an open-source toolkit for 3D computer graphics, image processing, and visualization** + - MIT License (MIT) **tox is a generic virtualenv management and test command line tool** * - `coverage `__ - - BSD License (BSD) **NumPy is the fundamental package for array computing with Python** + - Apache Software License (Apache-2.0) **Code coverage measurement for Python** * - `pylint `__ - - MIT License (MIT) **YAML parser and emitter for Python** + - GNU General Public License (GPLv2) **Python code static checker** * - `pytest `__ - - MIT License (MIT) **Python bindings for the Brotli compression library** + - MIT License (MIT) **pytest: simple powerful testing with Python** * - `anybadge `__ - - MIT License (MIT) **Simple data validation library** + - MIT License (MIT) **Simple, flexible badge generator for project badges.** Documentation dependencies -------------------------- @@ -57,7 +57,7 @@ Documentation dependencies * - Package - License and Description * - `Jinja2 `__ - - BSD License (BSD) **VTK is an open-source toolkit for 3D computer graphics, image processing, and visualization** + - BSD License (BSD) **A very fast and expressive template engine.** * - `Pygments `__ - BSD License (BSD) **Pygments is a syntax highlighting package written in Python** * - `docutils `__ diff --git a/setup.cfg b/setup.cfg index 9163627b..af3540d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -41,9 +41,8 @@ where = src [options.entry_points] console_scripts = - # Applications + # Application lbaf = lbaf:run - lbaf-move-counts-viewer = lbaf:move_counts_viewer # Utils lbaf-csv-2-json-converter = lbaf:csv_2_json_converter lbaf-vt-data-extractor = lbaf:vt_data_extractor diff --git a/src/lbaf/Applications/MoveCountsViewer.py b/src/lbaf/Applications/MoveCountsViewer.py deleted file mode 100644 index 80f8b2e0..00000000 --- a/src/lbaf/Applications/MoveCountsViewer.py +++ /dev/null @@ -1,407 +0,0 @@ -# -#@HEADER -############################################################################### -# -# MoveCountsViewer.py -# DARMA/LB-analysis-framework => LB Analysis Framework -# -# Copyright 2019-2024 National Technology & Engineering Solutions of Sandia, LLC -# (NTESS). Under the terms of Contract DE-NA0003525 with NTESS, the U.S. -# Government retains certain rights in this software. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# -# * Neither the name of the copyright holder nor the names of its -# contributors may be used to endorse or promote products derived from this -# software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Questions? Contact darma@sandia.gov -# -############################################################################### -#@HEADER -# -import os -import sys -import csv -import importlib -try: - import vtk - using_vtk = True -except ModuleNotFoundError: - using_vtk = False - -# pylint:disable=C0413:wrong-import-position -# Use lbaf module from source if lbaf package is not installed -if importlib.util.find_spec('lbaf') is None: - sys.path.insert(0, f"{os.sep}".join(os.path.abspath(__file__).split(os.sep)[:-3])) -from lbaf import PROJECT_PATH -from lbaf.Utils.lbsLogging import get_logger, Logger -from lbaf.Utils.lbsArgumentParser import PromptArgumentParser -# pylint:enable=C0413:wrong-import-position - -class MoveCountsViewerParameters: - """A class to describe MoveCountsViewer parameters.""" - - def __init__(self, interactive): - # Set renderer parameters - self.renderer_background = [1, 1, 1] - - # Set actor_vertices parameters - self.actor_vertices_screen_size = 50 if interactive else 5000 - self.actor_vertices_color = [0, 0, 0] - self.actor_vertices_opacity = .3 if interactive else .5 - - # Set actor_labels parameters - self.actor_labels_color = [0, 0, 0] - self.actor_labels_font_size = 16 if interactive else 150 - self.actor_edges_opacity = .5 if interactive else 1 - self.actor_edges_line_width = 2 if interactive else 15 - - # Set actor_arrows parameters - self.actor_arrows_edge_glyph_position = .5 - self.actor_arrows_source_scale = .075 - - # Set actor_bar parameters - self.actor_bar_number_of_labels = 2 - self.actor_bar_width = .2 - self.actor_bar_heigth = .08 - self.actor_bar_position = [.4, .91] - self.actor_bar_title_color = [0, 0, 0] - self.actor_bar_label_color = [0, 0, 0] - - # Set window parameters - self.window_size_x = 600 - self.window_size_y = 600 - - # Set wti (WindowToImageFilter) parameters - self.wti_scale = 10 - - -class MoveCountsViewer: - """MoveCountsViewer class""" - - def __init__(self): - self.__args: dict = None - self.__logger: Logger = get_logger() - - def __parse_args(self): - """Parse arguments.""" - parser = PromptArgumentParser(allow_abbrev=False, description="MoveCountsViewer", prompt_default=True) - parser.add_argument("-p", "--n-processors", help="number of processors", default=8, type=int) - parser.add_argument("-f", "--input-file-name", help="input file name", - default=os.path.join(PROJECT_PATH, "data", "nolb-data", "data")) - parser.add_argument("-s", "--input-file-suffix", help="input file suffix", default="vom") - parser.add_argument("-o", "--output-file-name", help="output file name", - default=os.path.join(PROJECT_PATH, "output", "move_counts")) - parser.add_argument("-t", "--output-file-suffix", help="output file suffix", default=8) - parser.add_argument("-i", "--interactive", type=bool, help="interactive call", default=False) - self.__args = parser.parse_args() - - def check_args(self) -> bool: - """Validate input arguments.""" - if self.__args.output_file_name is None: - self.__args.output_file_name = 
self.__args.input_file_name - # If number of processors is not provided or set to 0 - if self.__args.n_processors == 0: - self.__logger.error("At least one processor needs to be defined. Exiting.") - return False - # If invalid file name is provided - elif (not self.__args.input_file_name.strip() or self.__args.input_file_name.strip() == "''"): - self.__logger.error("A file name needs to be defined. Exiting.") - return False - return True - - def compute_move_counts_viewer(self): - """Compute MoveCountsViewer.""" - - # Instantiate MoveCountsViewerParameters - viewer_params = MoveCountsViewerParameters(self.__args.interactive) - - # Create storage for vertex values - vertex_name = "Node ID" - vertex_data = vtk.vtkIntArray() - vertex_data.SetName(vertex_name) - - # Create a directed graph with one vertex per processor - # and two sets of edges - graph = vtk.vtkMutableDirectedGraph() - graph.GetVertexData().AddArray(vertex_data) - graph.GetVertexData().SetActiveScalars(vertex_name) - - # Populate graph vertices - for i in range(self.__args.n_processors): - vertex_data.InsertNextValue(i) - graph.AddVertex() - - # Compute directed move counts - directed_moves = {} - # directed_sizes = {} (unused) - for i in range(self.__args.n_processors): - # Iterate over all files - with open( - f"{self.__args.input_file_name}.{i}.{self.__args.input_file_suffix}", 'r', encoding="utf-8" - ) as input_file: - # Instantiate CSV reader - reader = csv.reader(input_file, delimiter=",") - - # Iterate over rows of processor file - for row in reader: - # Retrieve source node ID - src_id = int(row[0]) - # src_sz = float(row[2]) (unused) - - # Add edge when source != destination - if src_id != i: - directed_moves[(src_id, i)] = directed_moves.get( - (src_id, i), 0) + 1 - # Compute undirected move counts - undirected_moves = { - (i, j): directed_moves.get((i, j), 0) + directed_moves.get( - (j, i), 0) - for (i, j), v in directed_moves.items()} - - # Keep track of extremal values - move_range = (min(directed_moves.values()), - max(undirected_moves.values())) - - # Attach directed moves storage to edges - directed_moves_name = "Directed Move Counts" - directed_moves_edge = vtk.vtkIntArray() - directed_moves_edge.SetName(directed_moves_name) - graph.GetEdgeData().AddArray(directed_moves_edge) - graph.GetEdgeData().SetActiveScalars(directed_moves_name) - - # Attach undirected moves storage to edges - undirected_moves_name = "Undirected Move Counts" - undirected_moves_edge = vtk.vtkIntArray() - undirected_moves_edge.SetName(undirected_moves_name) - graph.GetEdgeData().AddArray(undirected_moves_edge) - - # Populate all edge data - for (k_s, k_d), v in directed_moves.items(): - graph.AddGraphEdge(k_s, k_d) - directed_moves_edge.InsertNextValue(v) - undirected_moves_edge.InsertNextValue(undirected_moves.get( - (k_s, k_d))) - - # Create renderer - renderer = vtk.vtkRenderer() - renderer.SetBackground(viewer_params.renderer_background) - renderer.GradientBackgroundOff() - - # Create graph vertex layout - layout_vertices = vtk.vtkGraphLayout() - layout_vertices.SetInputData(graph) - layout_vertices.SetLayoutStrategy(vtk.vtkSimple2DLayoutStrategy()) - - # Graph to vertex and square glyphs - local_glyph_types = { - 0: vtk.vtkGraphToGlyphs.VERTEX, - 1: vtk.vtkGraphToGlyphs.SQUARE} - glyphs = [] - for k, v in local_glyph_types.items(): - gtg = vtk.vtkGraphToGlyphs() - gtg.SetInputConnection(layout_vertices.GetOutputPort()) - gtg.SetGlyphType(v) - gtg.SetRenderer(renderer) - if k: - 
gtg.SetScreenSize(viewer_params.actor_vertices_screen_size) - gtg.FilledOn() - glyphs.append(gtg) - - # Square vertex mapper and actor - mapper_vertices = vtk.vtkPolyDataMapper() - mapper_vertices.SetInputConnection(glyphs[1].GetOutputPort()) - mapper_vertices.ScalarVisibilityOff() - actor_vertices = vtk.vtkActor() - actor_vertices.SetMapper(mapper_vertices) - actor_vertices.GetProperty().SetColor( - viewer_params.actor_vertices_color) - actor_vertices.GetProperty().SetOpacity( - viewer_params.actor_vertices_opacity) - renderer.AddViewProp(actor_vertices) - - # Vertex labels - labels = vtk.vtkLabeledDataMapper() - labels.SetInputConnection(glyphs[0].GetOutputPort()) - labels.SetLabelModeToLabelFieldData() - labels.SetFieldDataName(vertex_name) - actor_labels = vtk.vtkActor2D() - actor_labels.SetMapper(labels) - l_props = labels.GetLabelTextProperty() - l_props.SetJustificationToCentered() - l_props.SetVerticalJustificationToCentered() - l_props.SetColor(viewer_params.actor_labels_color) - l_props.SetFontSize(viewer_params.actor_labels_font_size) - l_props.BoldOn() - l_props.ItalicOff() - renderer.AddViewProp(actor_labels) - - # Create directed edge layout - layout_directed_edges = vtk.vtkEdgeLayout() - layout_directed_edges.SetInputConnection( - layout_vertices.GetOutputPort()) - layout_directed_edges.SetLayoutStrategy( - vtk.vtkPassThroughEdgeStrategy()) - - # Directed graph to edge lines - directed_edges = vtk.vtkGraphToPolyData() - directed_edges.SetInputConnection(layout_directed_edges.GetOutputPort()) - directed_edges.EdgeGlyphOutputOn() - directed_edges.SetEdgeGlyphPosition( - viewer_params.actor_arrows_edge_glyph_position) - - # Arrow source and glyph - arrow_source = vtk.vtkGlyphSource2D() - arrow_source.SetGlyphTypeToEdgeArrow() - arrow_source.SetScale(viewer_params.actor_arrows_source_scale) - arrow_glyph = vtk.vtkGlyph3D() - arrow_glyph.SetInputConnection(0, directed_edges.GetOutputPort(1)) - arrow_glyph.SetInputConnection(1, arrow_source.GetOutputPort()) - arrow_glyph.ScalingOff() - arrow_glyph.SetColorModeToColorByScalar() - - # Arrow mapper and actor - mapper_arrows = vtk.vtkPolyDataMapper() - mapper_arrows.SetInputConnection(arrow_glyph.GetOutputPort()) - mapper_arrows.SetScalarRange(move_range) - actor_arrows = vtk.vtkActor() - actor_arrows.SetMapper(mapper_arrows) - renderer.AddViewProp(actor_arrows) - - # Create undirected edge layout - layout_undirected_edges = vtk.vtkEdgeLayout() - layout_undirected_edges.SetInputConnection( - layout_vertices.GetOutputPort()) - layout_undirected_edges.SetLayoutStrategy( - vtk.vtkPassThroughEdgeStrategy()) - - # Undirected graph to edge lines - undirected_edges = vtk.vtkGraphToPolyData() - undirected_edges.SetInputConnection( - layout_undirected_edges.GetOutputPort()) - - # NB: This is a workaround for a bug in VTK7, cf. 
below - undirected_edges.Update() - undirected_edges.GetOutput().GetCellData().SetActiveScalars( - undirected_moves_name) - - # Undirected edge mapper and actor - mapper_edges = vtk.vtkPolyDataMapper() - mapper_edges.SetInputConnection(undirected_edges.GetOutputPort()) - mapper_edges.SetScalarRange(move_range) - mapper_edges.SetColorModeToMapScalars() - mapper_edges.SetScalarModeToUseCellData() - # The line below should be used in the absence of the VTK7 bug - # mapper_edges.SetArrayName(undirected_moves_name) - mapper_edges.SelectColorArray(undirected_moves_name) - actor_edges = vtk.vtkActor() - actor_edges.SetMapper(mapper_edges) - actor_edges.GetProperty().SetOpacity( - viewer_params.actor_edges_opacity) - actor_edges.GetProperty().SetLineWidth( - viewer_params.actor_edges_line_width) - renderer.AddViewProp(actor_edges) - - # Reset camera to set it up based on edge actor - renderer.ResetCamera() - - # Scalar bar actor - actor_bar = vtk.vtkScalarBarActor() - actor_bar.SetLookupTable(mapper_edges.GetLookupTable()) - actor_bar.SetTitle("Object Moves") - actor_bar.SetOrientationToHorizontal() - actor_bar.SetNumberOfLabels(viewer_params.actor_bar_number_of_labels) - actor_bar.SetWidth(viewer_params.actor_bar_width) - actor_bar.SetHeight(viewer_params.actor_bar_heigth) - actor_bar.GetPositionCoordinate().SetCoordinateSystemToNormalizedViewport() - actor_bar.GetPositionCoordinate().SetValue( - viewer_params.actor_bar_position[0], - viewer_params.actor_bar_position[1]) - actor_bar.GetTitleTextProperty().SetColor( - viewer_params.actor_bar_title_color) - actor_bar.GetLabelTextProperty().SetColor( - viewer_params.actor_bar_label_color) - actor_bar.SetLabelFormat("%g") - renderer.AddViewProp(actor_bar) - - # Render window - window = vtk.vtkRenderWindow() - window.AddRenderer(renderer) - window.SetSize(viewer_params.window_size_x, viewer_params.window_size_y) - window.SetAlphaBitPlanes(True) - window.SetMultiSamples(0) - - # Run interactive MoveCountsViewer if demanded - if self.__args.interactive: - # Render and interact - interactor = vtk.vtkRenderWindowInteractor() - interactor.SetRenderWindow(window) - window.Render() - interactor.Start() - - # Save viewer in output file format otherwise - else: - # Window to image - wti = vtk.vtkWindowToImageFilter() - window.Render() - wti.SetInput(window) - # Set high scale for image quality - wti.SetScale(viewer_params.wti_scale) - # Save with alpha channel for transparency - wti.SetInputBufferTypeToRGBA() - - # Write PNG image - writer = vtk.vtkPNGWriter() - writer.SetInputConnection(wti.GetOutputPort()) - writer.SetFileName( - f"{self.__args.output_file_name}.{self.__args.output_file_suffix}") - writer.Write() - - def run(self): - # Raise error if vtk is not installed - if not using_vtk: - raise ModuleNotFoundError("Could not find vtk module, which is required for the MoveCountsViewer.") - - """Run the MoveCountViewer logic.""" - # Parse command line arguments - self.__parse_args() - - # Print startup information - svi = sys.version_info - self.__logger.info(f"### Started with Python {svi.major}.{svi.minor}.{svi.micro}") - - # Parse command line arguments - self.__parse_args() - if not self.check_args(): - return 1 - - self.__logger.info("# Parsing command line arguments") - - # Execute viewer - self.compute_move_counts_viewer() - return 0 - - -if __name__ == "__main__": - MoveCountsViewer().run() diff --git a/src/lbaf/__init__.py b/src/lbaf/__init__.py index 0c36bb4a..fd420531 100644 --- a/src/lbaf/__init__.py +++ b/src/lbaf/__init__.py @@ -60,7 +60,6 @@ 
from lbaf.Applications.LBAF_app import LBAFApplication from lbaf.Utils.lbsVTDataExtractor import VTDataExtractorRunner from lbaf.Utils.lbsJSONDataFilesValidatorLoader import JSONDataFilesValidatorLoader -from lbaf.Applications.MoveCountsViewer import MoveCountsViewer from lbaf.Utils.lbsCsv2JsonDataConverter import Csv2JsonConverter from lbaf.Utils.lbsDataStatFilesUpdater import DataStatFilesUpdater from lbaf.Utils.lbsJSONDataFilesMaker import JSONDataFilesMaker @@ -93,10 +92,6 @@ def vt_data_files_maker() -> int: """Run a JSONDataFilesMaker instance.""" return JSONDataFilesMaker().run() -def move_counts_viewer(): - """Run a MoveCountsViewer instance.""" - return MoveCountsViewer().run() - def csv_2_json_converter() -> int: """Run a Csv2JsonConverter instance.""" return Csv2JsonConverter().run()
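
The removed test_lbs_visualizer_config test above probed for the vt-tv Python bindings before allowing
visualization to proceed. For readers following the migration away from lbsVisualizer, a minimal sketch of
that guard is given below; it assumes the bindings are importable as "vttv" (as in the removed test) and
reuses the error message that test asserted on. The helper name check_visualization_support is illustrative
only and is not an LBAF API.

import importlib.util
import sys


def check_visualization_support(visualization_enabled: bool) -> bool:
    """Return True when visualization may proceed, False when it must be skipped.

    Mirrors the behavior exercised by the removed test_lbs_visualizer_config:
    when visualization is requested but the vt-tv Python bindings ("vttv")
    cannot be found, an error is reported and the caller should abort.
    """
    if not visualization_enabled:
        return True
    if importlib.util.find_spec("vttv") is None:
        print("Visualization enabled but vt-tv module not found.", file=sys.stderr)
        return False
    return True


if __name__ == "__main__":
    # Exit non-zero when visualization was requested but cannot proceed.
    if not check_visualization_support(visualization_enabled=True):
        sys.exit(1)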