diff --git a/dump/plugins/portchannel.py b/dump/plugins/portchannel.py
new file mode 100644
index 000000000000..46022dc5f901
--- /dev/null
+++ b/dump/plugins/portchannel.py
@@ -0,0 +1,113 @@
+from dump.match_infra import MatchRequest
+from dump.helper import create_template_dict
+from .executor import Executor
+
+
+class Portchannel(Executor):
+    """
+    Debug Dump Plugin for PortChannel/LAG Module
+    """
+    ARG_NAME = "portchannel_name"
+
+    def __init__(self, match_engine=None):
+        super().__init__(match_engine)
+        self.ret_temp = {}
+        self.ns = ''
+        self.lag_members = set()
+
+    def get_all_args(self, ns=""):
+        req = MatchRequest(db="CONFIG_DB", table="PORTCHANNEL", key_pattern="*", ns=ns)
+        ret = self.match_engine.fetch(req)
+        all_lags = ret["keys"]
+        return [key.split("|")[-1] for key in all_lags]
+
+    def execute(self, params_dict):
+        self.ret_temp = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        self.lag_name = params_dict[Portchannel.ARG_NAME]
+        self.ns = params_dict["namespace"]
+        # CONFIG_DB
+        lag_found = self.init_lag_config_info()
+        if lag_found:
+            self.init_lag_member_config_info()
+        # APPL_DB
+        self.init_lag_appl_info()
+        # STATE_DB
+        self.init_lag_state_info()
+        # ASIC_DB
+        lag_type_objs_asic = self.init_lag_member_type_obj_asic_info()
+        self.init_lag_asic_info(lag_type_objs_asic)
+        return self.ret_temp
+
+    def add_to_ret_template(self, table, db, keys, err):
+        if not err and keys:
+            self.ret_temp[db]["keys"].extend(keys)
+            return True
+        else:
+            self.ret_temp[db]["tables_not_found"].extend([table])
+            return False
+
+    def init_lag_config_info(self):
+        req = MatchRequest(db="CONFIG_DB", table="PORTCHANNEL", key_pattern=self.lag_name, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_member_config_info(self):
+        req = MatchRequest(db="CONFIG_DB", table="PORTCHANNEL_MEMBER", key_pattern=self.lag_name + "|*", ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        for key in ret["keys"]:
+            self.lag_members.add(key.split("|")[-1])
+
+    def init_lag_appl_info(self):
+        req = MatchRequest(db="APPL_DB", table="LAG_TABLE", key_pattern=self.lag_name, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_state_info(self):
+        req = MatchRequest(db="STATE_DB", table="LAG_TABLE", key_pattern=self.lag_name, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_asic_info(self, lag_type_objs_asic):
+        if len(lag_type_objs_asic) == 0:
+            self.ret_temp["ASIC_DB"]["tables_not_found"].extend(["ASIC_STATE:SAI_OBJECT_TYPE_LAG"])
+            return
+        for lag_asic_obj in lag_type_objs_asic:
+            req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_LAG", key_pattern=lag_asic_obj, ns=self.ns)
+            ret = self.match_engine.fetch(req)
+            self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_member_type_obj_asic_info(self):
+        """
+        Given only a LAG name, the relevant SAI_OBJECT_TYPE_LAG key cannot be found directly in the ASIC DB.
+        Thus, the LAG members are used to find the SAI_LAG_MEMBER_ATTR_LAG_ID instead.
+        """
+        lag_type_objs_asic = set()
+        for port_name in self.lag_members:
+            port_asic_obj = self.get_port_asic_obj(port_name)
+            if port_asic_obj:
+                lag_member_key, lag_oid = self.get_lag_and_member_obj(port_asic_obj)
+                lag_type_objs_asic.add(lag_oid)
+        return lag_type_objs_asic
+
+    def get_port_asic_obj(self, port_name):
+        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", key_pattern="*", field="SAI_HOSTIF_ATTR_NAME",
+                           value=port_name, return_fields=["SAI_HOSTIF_ATTR_OBJ_ID"], ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        asic_port_obj_id = ""
+        if not ret["error"] and ret["keys"]:
+            sai_hostif_obj_key = ret["keys"][-1]
+            if sai_hostif_obj_key in ret["return_values"] and "SAI_HOSTIF_ATTR_OBJ_ID" in ret["return_values"][sai_hostif_obj_key]:
+                asic_port_obj_id = ret["return_values"][sai_hostif_obj_key]["SAI_HOSTIF_ATTR_OBJ_ID"]
+        return asic_port_obj_id
+
+    def get_lag_and_member_obj(self, port_asic_obj):
+        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER", key_pattern="*", field="SAI_LAG_MEMBER_ATTR_PORT_ID",
+                           value=port_asic_obj, return_fields=["SAI_LAG_MEMBER_ATTR_LAG_ID"], ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        lag_member_key = ""
+        lag_oid = ""
+        if not ret["error"] and ret["keys"]:
+            lag_member_key = ret["keys"][-1]
+            if lag_member_key in ret["return_values"] and "SAI_LAG_MEMBER_ATTR_LAG_ID" in ret["return_values"][lag_member_key]:
+                lag_oid = ret["return_values"][lag_member_key]["SAI_LAG_MEMBER_ATTR_LAG_ID"]
+        return lag_member_key, lag_oid
diff --git a/dump/plugins/portchannel_member.py b/dump/plugins/portchannel_member.py
new file mode 100644
index 000000000000..8c0ec52795b4
--- /dev/null
+++ b/dump/plugins/portchannel_member.py
@@ -0,0 +1,84 @@
+from dump.match_infra import MatchRequest
+from dump.helper import create_template_dict
+from .executor import Executor
+
+class Portchannel_Member(Executor):
+    """
+    Debug Dump Plugin for the PortChannel/LAG Member Module
+    """
+    ARG_NAME = "portchannel_member"
+
+    def __init__(self, match_engine=None):
+        super().__init__(match_engine)
+        self.ret_temp = {}
+        self.ns = ''
+        self.lag_member_key = ''
+        self.lag = ''
+        self.port_name = ''
+
+    def get_all_args(self, ns=""):
+        req = MatchRequest(db="CONFIG_DB", table="PORTCHANNEL_MEMBER", key_pattern="*", ns=ns)
+        ret = self.match_engine.fetch(req)
+        all_lag_members = ret["keys"]
+        return [key.split("|", 1)[-1] for key in all_lag_members]
+
+    def execute(self, params_dict):
+        self.ret_temp = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        self.lag_member_key = params_dict[Portchannel_Member.ARG_NAME]
+        if "|" not in self.lag_member_key:
+            return self.ret_temp
+        self.lag, self.port_name = self.lag_member_key.split("|", 1)
+        self.ns = params_dict["namespace"]
+        # CONFIG_DB
+        self.init_lag_member_config_info()
+        # APPL_DB
+        self.init_lag_member_appl_info()
+        # STATE_DB
+        self.init_lag_member_state_info()
+        # ASIC_DB
+        self.init_lag_member_type_obj_asic_info()
+        return self.ret_temp
+
+    def add_to_ret_template(self, table, db, keys, err):
+        if not err and keys:
+            self.ret_temp[db]["keys"].extend(keys)
+            return True
+        else:
+            self.ret_temp[db]["tables_not_found"].extend([table])
+            return False
+
+    def init_lag_member_config_info(self):
+        req = MatchRequest(db="CONFIG_DB", table="PORTCHANNEL_MEMBER", key_pattern=self.lag_member_key, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_member_appl_info(self):
+        req = MatchRequest(db="APPL_DB", table="LAG_MEMBER_TABLE", key_pattern=self.lag + ":" + self.port_name, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_member_state_info(self):
+        req = MatchRequest(db="STATE_DB", table="LAG_MEMBER_TABLE", key_pattern=self.lag_member_key, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def init_lag_member_type_obj_asic_info(self):
+        port_asic_obj = self.get_port_asic_obj(self.port_name)
+        if not port_asic_obj:
+            self.ret_temp["ASIC_DB"]["tables_not_found"].extend(["ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER"])
+            return False
+        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER", key_pattern="*", field="SAI_LAG_MEMBER_ATTR_PORT_ID",
+                           value=port_asic_obj, ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        return self.add_to_ret_template(req.table, req.db, ret["keys"], ret["error"])
+
+    def get_port_asic_obj(self, port_name):
+        req = MatchRequest(db="ASIC_DB", table="ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF", key_pattern="*", field="SAI_HOSTIF_ATTR_NAME",
+                           value=port_name, return_fields=["SAI_HOSTIF_ATTR_OBJ_ID"], ns=self.ns)
+        ret = self.match_engine.fetch(req)
+        asic_port_obj_id = ""
+        if not ret["error"] and ret["keys"]:
+            sai_hostif_obj_key = ret["keys"][-1]
+            if sai_hostif_obj_key in ret["return_values"] and "SAI_HOSTIF_ATTR_OBJ_ID" in ret["return_values"][sai_hostif_obj_key]:
+                asic_port_obj_id = ret["return_values"][sai_hostif_obj_key]["SAI_HOSTIF_ATTR_OBJ_ID"]
+        return asic_port_obj_id
diff --git a/tests/dump_input/portchannel/appl_db.json b/tests/dump_input/portchannel/appl_db.json
new file mode 100644
index 000000000000..b273df0bdab1
--- /dev/null
+++ b/tests/dump_input/portchannel/appl_db.json
@@ -0,0 +1,18 @@
+{
+    "LAG_TABLE:PortChannel002": {
+        "oper_status": "up",
+        "mtu": "9100",
+        "admin_status": "up"
+    },
+    "LAG_TABLE:PortChannel001": {
+        "oper_status": "up",
+        "mtu": "9100",
+        "admin_status": "up"
+    },
+    "LAG_MEMBER_TABLE:PortChannel001:Ethernet4": {
+        "status": "enabled"
+    },
+    "LAG_MEMBER_TABLE:PortChannel001:Ethernet0": {
+        "status": "enabled"
+    }
+}
diff --git a/tests/dump_input/portchannel/asic_db.json b/tests/dump_input/portchannel/asic_db.json
new file mode 100644
index 000000000000..95c6ee072c52
--- /dev/null
+++ b/tests/dump_input/portchannel/asic_db.json
@@ -0,0 +1,34 @@
+{
+    "ASIC_STATE:SAI_OBJECT_TYPE_LAG:oid:0x2000000000d17": {
+        "SAI_LAG_ATTR_PORT_VLAN_ID": "1"
+    },
+    "ASIC_STATE:SAI_OBJECT_TYPE_LAG:oid:0x20000000004d3": {
+        "NULL": "NULL"
+    },
+    "ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER:oid:0x1b000000000d18": {
+        "SAI_LAG_MEMBER_ATTR_EGRESS_DISABLE": "false",
+        "SAI_LAG_MEMBER_ATTR_INGRESS_DISABLE": "false",
+        "SAI_LAG_MEMBER_ATTR_LAG_ID": "oid:0x2000000000d17",
+        "SAI_LAG_MEMBER_ATTR_PORT_ID": "oid:0x10000000002cc"
+    },
+    "ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER:oid:0x1b000000000d1a": {
+        "SAI_LAG_MEMBER_ATTR_EGRESS_DISABLE": "false",
+        "SAI_LAG_MEMBER_ATTR_INGRESS_DISABLE": "false",
+        "SAI_LAG_MEMBER_ATTR_LAG_ID": "oid:0x2000000000d17",
+        "SAI_LAG_MEMBER_ATTR_PORT_ID": "oid:0x100000000093e"
+    },
+    "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000cdf": {
+        "SAI_HOSTIF_ATTR_NAME": "Ethernet0",
+        "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x10000000002cc",
+        "SAI_HOSTIF_ATTR_OPER_STATUS": "true",
+        "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV",
+        "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"
+    },
+    "ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:oid:0xd000000000acd": {
+        "SAI_HOSTIF_ATTR_NAME": "Ethernet4",
+        "SAI_HOSTIF_ATTR_OBJ_ID": "oid:0x100000000093e",
+        "SAI_HOSTIF_ATTR_OPER_STATUS": "true",
+        "SAI_HOSTIF_ATTR_TYPE": "SAI_HOSTIF_TYPE_NETDEV",
+        "SAI_HOSTIF_ATTR_VLAN_TAG": "SAI_HOSTIF_VLAN_TAG_STRIP"
+    }
+}
diff --git a/tests/dump_input/portchannel/config_db.json b/tests/dump_input/portchannel/config_db.json
new file mode 100644
index 000000000000..b056e625cd4a
--- /dev/null
+++ b/tests/dump_input/portchannel/config_db.json
@@ -0,0 +1,29 @@
+{
+    "PORTCHANNEL|PortChannel001": {
+        "admin_status": "up",
+        "lacp_key": "auto",
+        "min_links": "1",
+        "mtu": "9100"
+    },
+    "PORTCHANNEL|PortChannel002": {
+        "admin_status": "up",
+        "lacp_key": "auto",
+        "min_links": "1",
+        "mtu": "9100"
+    },
+    "PORTCHANNEL|PortChannel003": {
+        "admin_status": "up",
+        "lacp_key": "auto",
+        "min_links": "1",
+        "mtu": "9100"
+    },
+    "PORTCHANNEL_MEMBER|PortChannel001|Ethernet0": {
+        "NULL": "NULL"
+    },
+    "PORTCHANNEL_MEMBER|PortChannel001|Ethernet4": {
+        "NULL": "NULL"
+    },
+    "PORTCHANNEL_MEMBER|PortChannel001|Ethernet8": {
+        "NULL": "NULL"
+    }
+}
diff --git a/tests/dump_input/portchannel/state_db.json b/tests/dump_input/portchannel/state_db.json
new file mode 100644
index 000000000000..61a55f065650
--- /dev/null
+++ b/tests/dump_input/portchannel/state_db.json
@@ -0,0 +1,54 @@
+{
+    "LAG_TABLE|PortChannel001": {
+        "runner.active": "true",
+        "runner.fallback": "false",
+        "runner.fast_rate": "false",
+        "setup.kernel_team_mode_name": "loadbalance",
+        "setup.pid": "27",
+        "state": "ok",
+        "team_device.ifinfo.dev_addr": "1c:34:da:1c:9f:00",
+        "team_device.ifinfo.ifindex": "137"
+    },
+    "LAG_TABLE|PortChannel002": {
+        "runner.active": "true",
+        "runner.fallback": "false",
+        "runner.fast_rate": "false",
+        "setup.kernel_team_mode_name": "loadbalance",
+        "setup.pid": "35",
+        "state": "ok",
+        "team_device.ifinfo.dev_addr": "1c:34:da:1c:9f:00",
+        "team_device.ifinfo.ifindex": "138"
+    },
+    "LAG_MEMBER_TABLE|PortChannel001|Ethernet4": {
+        "ifinfo.dev_addr": "1c:34:da:1c:9f:00",
+        "ifinfo.ifindex": "203",
+        "link.up": "true",
+        "link_watches.list.link_watch_0.up": "true",
+        "runner.actor_lacpdu_info.port": "5",
+        "runner.actor_lacpdu_info.state": "69",
+        "runner.actor_lacpdu_info.system": "1c:34:da:1c:9f:00",
+        "runner.aggregator.id": "0",
+        "runner.aggregator.selected": "false",
+        "runner.partner_lacpdu_info.port": "0",
+        "runner.partner_lacpdu_info.state": "2",
+        "runner.partner_lacpdu_info.system": "00:00:00:00:00:00",
+        "runner.selected": "false",
+        "runner.state": "defaulted"
+    },
+    "LAG_MEMBER_TABLE|PortChannel001|Ethernet0": {
+        "ifinfo.dev_addr": "1c:34:da:1c:9f:00",
+        "ifinfo.ifindex": "202",
+        "link.up": "true",
+        "link_watches.list.link_watch_0.up": "true",
+        "runner.actor_lacpdu_info.port": "0",
+        "runner.actor_lacpdu_info.state": "69",
+        "runner.actor_lacpdu_info.system": "1c:34:da:1c:9f:00",
+        "runner.aggregator.id": "0",
+        "runner.aggregator.selected": "false",
+        "runner.partner_lacpdu_info.port": "0",
+        "runner.partner_lacpdu_info.state": "2",
+        "runner.partner_lacpdu_info.system": "00:00:00:00:00:00",
+        "runner.selected": "false",
+        "runner.state": "defaulted"
+    }
+}
diff --git a/tests/dump_tests/module_tests/portchannel_member_test.py b/tests/dump_tests/module_tests/portchannel_member_test.py
new file mode 100644
index 000000000000..e4e0f7558b8a
--- /dev/null
+++ b/tests/dump_tests/module_tests/portchannel_member_test.py
@@ -0,0 +1,107 @@
+import json, os, sys
+import jsonpatch
+import unittest
+import pytest
+from deepdiff import DeepDiff
+from mock import patch
+from dump.helper import create_template_dict, sort_lists
+from dump.plugins.portchannel_member import Portchannel_Member
+from dump.match_infra import MatchEngine, ConnectionPool
+from swsscommon.swsscommon import SonicV2Connector
+
+
+# Location of the dedicated DBs used for UT
+module_tests_path = os.path.dirname(__file__)
+dump_tests_path = os.path.join(module_tests_path, "../")
+tests_path = os.path.join(dump_tests_path, "../")
+dump_test_input = os.path.join(tests_path, "dump_input")
+port_files_path = os.path.join(dump_test_input, "portchannel")
+
+# Define the mock files to read from
+dedicated_dbs = {}
+dedicated_dbs['CONFIG_DB'] = os.path.join(port_files_path, "config_db.json")
+dedicated_dbs['APPL_DB'] = os.path.join(port_files_path, "appl_db.json")
+dedicated_dbs['ASIC_DB'] = os.path.join(port_files_path, "asic_db.json")
+dedicated_dbs['STATE_DB'] = os.path.join(port_files_path, "state_db.json")
+
+
+def populate_mock(db, db_names):
+    for db_name in db_names:
+        db.connect(db_name)
+        # Delete any default data
+        db.delete_all_by_pattern(db_name, "*")
+        with open(dedicated_dbs[db_name]) as f:
+            mock_json = json.load(f)
+        for key in mock_json:
+            for field, value in mock_json[key].items():
+                db.set(db_name, key, field, value)
+
+
+@pytest.fixture(scope="class", autouse=True)
+def match_engine():
+
+    print("SETUP")
+    os.environ["VERBOSE"] = "1"
+
+    # Monkey Patch the SonicV2Connector Object
+    from ...mock_tables import dbconnector
+    db = SonicV2Connector()
+
+    # populate the db with mock data
+    db_names = list(dedicated_dbs.keys())
+    try:
+        populate_mock(db, db_names)
+    except Exception as e:
+        assert False, "Mock initialization failed: " + str(e)
+
+    # Initialize connection pool
+    conn_pool = ConnectionPool()
+    DEF_NS = ''  # Default Namespace
+    conn_pool.cache = {DEF_NS: {'conn': db,
+                                'connected_to': set(db_names)}}
+
+    # Initialize match_engine
+    match_engine = MatchEngine(conn_pool)
+    yield match_engine
+    print("TEARDOWN")
+    os.environ["VERBOSE"] = "0"
+
+
+@pytest.mark.usefixtures("match_engine")
+class TestPortChannelMemberModule:
+    def test_get_all_args(self, match_engine):
+        """
+        Scenario: Verify whether the get_all_args method works as expected
+        """
+        m_lag_member = Portchannel_Member(match_engine)
+        returned = m_lag_member.get_all_args("")
+        expect = ["PortChannel001|Ethernet0", "PortChannel001|Ethernet4", "PortChannel001|Ethernet8"]
+        ddiff = DeepDiff(expect, returned, ignore_order=True)
+        assert not ddiff, ddiff
+
+    def test_missing_appl_state_asic(self, match_engine):
+        '''
+        Scenario: The LAG member is configured, but the change has not yet propagated
+        '''
+        params = {Portchannel_Member.ARG_NAME:"PortChannel001|Ethernet8", "namespace":""}
+        m_lag_member = Portchannel_Member(match_engine)
+        returned = m_lag_member.execute(params)
+        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        expect["CONFIG_DB"]["keys"].append("PORTCHANNEL_MEMBER|PortChannel001|Ethernet8")
+        expect["APPL_DB"]["tables_not_found"].append("LAG_MEMBER_TABLE")
+        expect["STATE_DB"]["tables_not_found"].append("LAG_MEMBER_TABLE")
+        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER")
+        ddiff = DeepDiff(returned, expect, ignore_order=True)
+        assert not ddiff, ddiff
+
+    def test_working_config(self, match_engine):
+        params = {Portchannel_Member.ARG_NAME:"PortChannel001|Ethernet0", "namespace":""}
+        m_lag_member = Portchannel_Member(match_engine)
+        returned = m_lag_member.execute(params)
+        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        expect["CONFIG_DB"]["keys"].append("PORTCHANNEL_MEMBER|PortChannel001|Ethernet0")
+        expect["APPL_DB"]["keys"].append("LAG_MEMBER_TABLE:PortChannel001:Ethernet0")
+        expect["STATE_DB"]["keys"].append("LAG_MEMBER_TABLE|PortChannel001|Ethernet0")
+        expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER:oid:0x1b000000000d18")
+        ddiff = DeepDiff(returned, expect, ignore_order=True)
+        assert not ddiff, ddiff
diff --git a/tests/dump_tests/module_tests/portchannel_test.py b/tests/dump_tests/module_tests/portchannel_test.py
new file mode 100644
index 000000000000..31c5acf077d4
--- /dev/null
+++ b/tests/dump_tests/module_tests/portchannel_test.py
@@ -0,0 +1,128 @@
+import json, os, sys
+import jsonpatch
+import unittest
+import pytest
+from deepdiff import DeepDiff
+from mock import patch
+from dump.helper import create_template_dict, sort_lists
+from dump.plugins.portchannel import Portchannel
+from dump.match_infra import MatchEngine, ConnectionPool
+from swsscommon.swsscommon import SonicV2Connector
+
+
+# Location of the dedicated DBs used for UT
+module_tests_path = os.path.dirname(__file__)
+dump_tests_path = os.path.join(module_tests_path, "../")
+tests_path = os.path.join(dump_tests_path, "../")
+dump_test_input = os.path.join(tests_path, "dump_input")
+port_files_path = os.path.join(dump_test_input, "portchannel")
+
+# Define the mock files to read from
+dedicated_dbs = {}
+dedicated_dbs['CONFIG_DB'] = os.path.join(port_files_path, "config_db.json")
+dedicated_dbs['APPL_DB'] = os.path.join(port_files_path, "appl_db.json")
+dedicated_dbs['ASIC_DB'] = os.path.join(port_files_path, "asic_db.json")
+dedicated_dbs['STATE_DB'] = os.path.join(port_files_path, "state_db.json")
+
+
+def populate_mock(db, db_names):
+    for db_name in db_names:
+        db.connect(db_name)
+        # Delete any default data
+        db.delete_all_by_pattern(db_name, "*")
+        with open(dedicated_dbs[db_name]) as f:
+            mock_json = json.load(f)
+        for key in mock_json:
+            for field, value in mock_json[key].items():
+                db.set(db_name, key, field, value)
+
+
+@pytest.fixture(scope="class", autouse=True)
+def match_engine():
+
+    print("SETUP")
+    os.environ["VERBOSE"] = "1"
+
+    # Monkey Patch the SonicV2Connector Object
+    from ...mock_tables import dbconnector
+    db = SonicV2Connector()
+
+    # populate the db with mock data
+    db_names = list(dedicated_dbs.keys())
+    try:
+        populate_mock(db, db_names)
+    except Exception as e:
+        assert False, "Mock initialization failed: " + str(e)
+
+    # Initialize connection pool
+    conn_pool = ConnectionPool()
+    DEF_NS = ''  # Default Namespace
+    conn_pool.cache = {DEF_NS: {'conn': db,
+                                'connected_to': set(db_names)}}
+
+    # Initialize match_engine
+    match_engine = MatchEngine(conn_pool)
+    yield match_engine
+    print("TEARDOWN")
+    os.environ["VERBOSE"] = "0"
+
+
+@pytest.mark.usefixtures("match_engine")
+class TestPortChannelModule:
+    def test_get_all_args(self, match_engine):
+        """
+        Scenario: Verify whether the get_all_args method works as expected
+        """
+        m_lag = Portchannel(match_engine)
+        returned = m_lag.get_all_args("")
+        expect = ["PortChannel001", "PortChannel002", "PortChannel003"]
+        ddiff = DeepDiff(expect, returned, ignore_order=True)
+        assert not ddiff, ddiff
+
+    def test_missing_appl_state_asic(self, match_engine):
+        '''
+        Scenario: The LAG is configured, but the change has not yet propagated
+        '''
+        params = {Portchannel.ARG_NAME:"PortChannel003", "namespace":""}
+        m_lag = Portchannel(match_engine)
+        returned = m_lag.execute(params)
+        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        expect["CONFIG_DB"]["keys"].append("PORTCHANNEL|PortChannel003")
+        expect["APPL_DB"]["tables_not_found"].append("LAG_TABLE")
+        expect["STATE_DB"]["tables_not_found"].append("LAG_TABLE")
+        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_LAG")
+        ddiff = DeepDiff(returned, expect, ignore_order=True)
+        assert not ddiff, ddiff
+
+    def test_lag_with_no_members(self, match_engine):
+        '''
+        Scenario: When the PortChannel doesn't have any members,
+                  it is not possible to uniquely identify the related ASIC LAG key
+        '''
+        params = {Portchannel.ARG_NAME:"PortChannel002", "namespace":""}
+        m_lag = Portchannel(match_engine)
+        returned = m_lag.execute(params)
+        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        expect["CONFIG_DB"]["keys"].append("PORTCHANNEL|PortChannel002")
+        expect["APPL_DB"]["keys"].append("LAG_TABLE:PortChannel002")
+        expect["STATE_DB"]["keys"].append("LAG_TABLE|PortChannel002")
+        expect["ASIC_DB"]["tables_not_found"].append("ASIC_STATE:SAI_OBJECT_TYPE_LAG")
+        ddiff = DeepDiff(returned, expect, ignore_order=True)
+        assert not ddiff, ddiff
+
+    def test_lag_with_members(self, match_engine):
+        '''
+        Scenario: When the LAG has members, it should be possible
+                  to uniquely identify the related ASIC LAG keys
+        '''
+        params = {Portchannel.ARG_NAME:"PortChannel001", "namespace":""}
+        m_lag = Portchannel(match_engine)
+        returned = m_lag.execute(params)
+        expect = create_template_dict(dbs=["CONFIG_DB", "APPL_DB", "ASIC_DB", "STATE_DB"])
+        expect["CONFIG_DB"]["keys"].append("PORTCHANNEL|PortChannel001")
+        expect["APPL_DB"]["keys"].append("LAG_TABLE:PortChannel001")
+        expect["STATE_DB"]["keys"].append("LAG_TABLE|PortChannel001")
+        expect["ASIC_DB"]["keys"].append("ASIC_STATE:SAI_OBJECT_TYPE_LAG:oid:0x2000000000d17")
+        ddiff = DeepDiff(expect, returned, ignore_order=True)
+        print(returned, expect, ddiff)
+        assert not ddiff, ddiff
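The ASIC_DB resolution that both plugins rely on can be summarized in a small standalone sketch (illustrative only, not part of the patch; resolve_lag_oid is a hypothetical name and asic_db stands for a plain dict shaped like tests/dump_input/portchannel/asic_db.json). It walks the same chain as get_port_asic_obj() and get_lag_and_member_obj(): port name -> SAI_HOSTIF_ATTR_OBJ_ID -> matching SAI_OBJECT_TYPE_LAG_MEMBER entry -> SAI_LAG_MEMBER_ATTR_LAG_ID.

def resolve_lag_oid(asic_db, port_name):
    # Step 1: the HOSTIF entry whose SAI_HOSTIF_ATTR_NAME equals the port name carries the port OID
    port_oid = ""
    for key, fvs in asic_db.items():
        if key.startswith("ASIC_STATE:SAI_OBJECT_TYPE_HOSTIF:") and fvs.get("SAI_HOSTIF_ATTR_NAME") == port_name:
            port_oid = fvs.get("SAI_HOSTIF_ATTR_OBJ_ID", "")
    if not port_oid:
        return ""
    # Step 2: the LAG_MEMBER entry whose SAI_LAG_MEMBER_ATTR_PORT_ID equals that OID carries the LAG OID
    for key, fvs in asic_db.items():
        if key.startswith("ASIC_STATE:SAI_OBJECT_TYPE_LAG_MEMBER:") and fvs.get("SAI_LAG_MEMBER_ATTR_PORT_ID") == port_oid:
            return fvs.get("SAI_LAG_MEMBER_ATTR_LAG_ID", "")
    return ""

# With the mock data above, resolve_lag_oid(asic_db, "Ethernet0") yields "oid:0x2000000000d17",
# i.e. the OID in the ASIC_STATE:SAI_OBJECT_TYPE_LAG key that the Portchannel plugin reports.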