Merge pull request #8964 from LabNConsulting/chopps/config-faster
tests: configure/reset routers in parallel
commit 032d1a65ff
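
At a high level, this change stops the topotest helpers from pushing configuration to one router at a time and instead has them build per-router command lists and hand everything to a single call that loads all routers concurrently. Roughly, in terms of the two library entry points this PR touches (sketch only, arguments trimmed; config_per_router is an illustrative name for a {router: [config lines]} dict):

    # Before: each helper applied configuration sequentially, one router per call.
    for rname, lines in config_per_router.items():
        create_common_configuration(tgen, rname, lines, "bgp")

    # After: one dict in, one batched call; the load step runs vtysh for all
    # routers in parallel inside create_common_configurations().
    create_common_configurations(tgen, config_per_router, "bgp")

The real call sites appear in the hunks below.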
@@ -208,6 +208,9 @@ def test_modify_and_delete_router_id(request):
     tc_name = request.node.name
     write_test_header(tc_name)
 
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
+
     # Modify router id
     input_dict = {
         "r1": {"bgp": {"router_id": "12.12.12.12"}},

@@ -252,6 +255,9 @@ def test_bgp_config_with_4byte_as_number(request):
     tc_name = request.node.name
     write_test_header(tc_name)
 
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
+
     input_dict = {
         "r1": {"bgp": {"local_as": 131079}},
         "r2": {"bgp": {"local_as": 131079}},

@@ -283,6 +289,9 @@ def test_BGP_config_with_invalid_ASN_p2(request):
     tc_name = request.node.name
     write_test_header(tc_name)
 
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
+
     # Api call to modify AS number
     input_dict = {
         "r1": {

@@ -307,11 +316,18 @@ def test_BGP_config_with_invalid_ASN_p2(request):
         },
     }
     result = modify_as_number(tgen, topo, input_dict)
-    try:
-        assert result is True
-    except AssertionError:
-        logger.info("Expected behaviour: {}".format(result))
-        logger.info("BGP config is not created because of invalid ASNs")
+    assert result is not True, (
+        "Expected BGP config is not created because of invalid ASNs: {}".format(
+            result
+        )
+    )
 
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
+
+    result = verify_bgp_convergence(tgen, topo)
+    if result != True:
+        assert False, "Testcase " + tc_name + " :Failed \n Error: {}".format(result)
+
     write_test_footer(tc_name)
 

@@ -331,6 +347,13 @@ def test_BGP_config_with_2byteAS_and_4byteAS_number_p1(request):
     tc_name = request.node.name
     write_test_header(tc_name)
 
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
+
+    result = verify_bgp_convergence(tgen, topo)
+    if result != True:
+        assert False, "Testcase " + tc_name + " :Failed \n Error: {}".format(result)
+
     # Api call to modify AS number
     input_dict = {
         "r1": {"bgp": {"local_as": 131079}},

@@ -586,7 +609,8 @@ def test_BGP_attributes_with_vrf_default_keyword_p0(request):
     if tgen.routers_have_failure():
         pytest.skip(tgen.errors)
 
-    # reset_config_on_routers(tgen)
+    # Creating configuration from JSON
+    reset_config_on_routers(tgen)
 
     step("Configure static routes and redistribute in BGP on R3")
     for addr_type in ADDR_TYPES:
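
Because resets are now parallelized (and therefore cheap), each test case restores the saved baseline configuration up front instead of depending on whatever the previous test left behind. A typical test body now starts roughly like this (sketch assembled from the hunks above; tgen and topo come from the usual topotest fixtures):

    def test_example(request):
        tc_name = request.node.name
        write_test_header(tc_name)

        # Creating configuration from JSON
        reset_config_on_routers(tgen)

        result = verify_bgp_convergence(tgen, topo)
        if result != True:
            assert False, "Testcase " + tc_name + " :Failed \n Error: {}".format(result)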
@@ -244,11 +244,11 @@ def pytest_runtest_makereport(item, call):
         modname = parent.module.__name__
 
         # Treat skips as non errors, don't pause after
-        if call.excinfo.typename != "AssertionError":
+        if call.excinfo.typename == "Skipped":
             pause = False
             error = False
             logger.info(
-                'assert skipped at "{}/{}": {}'.format(
+                'test skipped at "{}/{}": {}'.format(
                     modname, item.name, call.excinfo.value
                 )
             )

@@ -257,7 +257,7 @@ def pytest_runtest_makereport(item, call):
         # Handle assert failures
         parent._previousfailed = item  # pylint: disable=W0212
         logger.error(
-            'assert failed at "{}/{}": {}'.format(
+            'test failed at "{}/{}": {}'.format(
                 modname, item.name, call.excinfo.value
             )
         )
@@ -33,7 +33,7 @@ from lib.topotest import frr_unicode
 
 # Import common_config to use commomnly used APIs
 from lib.common_config import (
-    create_common_configuration,
+    create_common_configurations,
     InvalidCLIError,
     load_config_to_router,
     check_address_types,

@@ -148,6 +148,8 @@ def create_router_bgp(tgen, topo, input_dict=None, build=False, load_config=True
     topo = topo["routers"]
     input_dict = deepcopy(input_dict)
 
+    config_data_dict = {}
+
     for router in input_dict.keys():
         if "bgp" not in input_dict[router]:
             logger.debug("Router %s: 'bgp' not present in input_dict", router)

@@ -158,6 +160,8 @@ def create_router_bgp(tgen, topo, input_dict=None, build=False, load_config=True
         if type(bgp_data_list) is not list:
             bgp_data_list = [bgp_data_list]
 
+        config_data = []
+
         for bgp_data in bgp_data_list:
             data_all_bgp = __create_bgp_global(tgen, bgp_data, router, build)
             if data_all_bgp:

@@ -198,16 +202,19 @@ def create_router_bgp(tgen, topo, input_dict=None, build=False, load_config=True
                 data_all_bgp = __create_l2vpn_evpn_address_family(
                     tgen, topo, bgp_data, router, config_data=data_all_bgp
                 )
+            if data_all_bgp:
+                config_data.extend(data_all_bgp)
 
-            try:
-                result = create_common_configuration(
-                    tgen, router, data_all_bgp, "bgp", build, load_config
-                )
-            except InvalidCLIError:
-                # Traceback
-                errormsg = traceback.format_exc()
-                logger.error(errormsg)
-                return errormsg
+        if config_data:
+            config_data_dict[router] = config_data
+
+    try:
+        result = create_common_configurations(
+            tgen, config_data_dict, "bgp", build, load_config
+        )
+    except InvalidCLIError:
+        logger.error("create_router_bgp", exc_info=True)
+        result = False
 
     logger.debug("Exiting lib API: create_router_bgp()")
     return result

@@ -226,7 +233,7 @@ def __create_bgp_global(tgen, input_dict, router, build=False):
 
     Returns
     -------
-    True or False
+    list of config commands
     """
 
     result = False

@@ -241,7 +248,7 @@ def __create_bgp_global(tgen, input_dict, router, build=False):
         logger.debug(
             "Router %s: 'local_as' not present in input_dict" "for BGP", router
         )
-        return False
+        return config_data
 
     local_as = bgp_data.setdefault("local_as", "")
     cmd = "router bgp {}".format(local_as)

@@ -1532,15 +1539,16 @@ def modify_as_number(tgen, topo, input_dict):
             create_router_bgp(tgen, topo, router_dict)
 
         logger.info("Applying modified bgp configuration")
-        create_router_bgp(tgen, new_topo)
+        result = create_router_bgp(tgen, new_topo)
+        if result is not True:
+            result = "Error applying new AS number config"
     except Exception as e:
         errormsg = traceback.format_exc()
        logger.error(errormsg)
        return errormsg
 
     logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
-    return True
+    return result
 
 
 @retry(retry_timeout=8)
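
The BGP builder above now only accumulates CLI lines per router; nothing is written or loaded until every router's commands have been collected. Condensed, the new control flow in create_router_bgp() looks like this (sketch; the address-family helpers are elided):

    config_data_dict = {}
    for router in input_dict.keys():
        config_data = []
        # ... __create_bgp_global() and the address-family helpers extend config_data ...
        if config_data:
            config_data_dict[router] = config_data

    try:
        result = create_common_configurations(
            tgen, config_data_dict, "bgp", build, load_config
        )
    except InvalidCLIError:
        logger.error("create_router_bgp", exc_info=True)
        result = False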
@@ -27,6 +27,7 @@ from re import search as re_search
 from tempfile import mkdtemp
 
 import json
+import logging
 import os
 import sys
 import traceback

@@ -275,7 +276,8 @@ def apply_raw_config(tgen, input_dict):
     True or errormsg
     """
 
-    result = True
+    rlist = []
+
     for router_name in input_dict.keys():
         config_cmd = input_dict[router_name]["raw_config"]
 

@@ -287,13 +289,14 @@ def apply_raw_config(tgen, input_dict):
         for cmd in config_cmd:
             cfg.write("{}\n".format(cmd))
 
-        result = load_config_to_router(tgen, router_name)
+        rlist.append(router_name)
 
-    return result
+    # Load config on all routers
+    return load_config_to_routers(tgen, rlist)
 
 
-def create_common_configuration(
-    tgen, router, data, config_type=None, build=False, load_config=True
+def create_common_configurations(
+    tgen, config_dict, config_type=None, build=False, load_config=True
 ):
     """
     API to create object of class FRRConfig and also create frr_json.conf

@@ -302,8 +305,8 @@
     Parameters
     ----------
     * `tgen`: tgen object
-    * `data`: Configuration data saved in a list.
-    * `router` : router id to be configured.
+    * `config_dict`: Configuration data saved in a dict of { router: config-list }
+    * `routers` : list of router id to be configured.
     * `config_type` : Syntactic information while writing configuration. Should
           be one of the value as mentioned in the config_map below.
     * `build` : Only for initial setup phase this is set as True

@@ -313,8 +316,6 @@
     """
     TMPDIR = os.path.join(LOGDIR, tgen.modname)
 
-    fname = "{}/{}/{}".format(TMPDIR, router, FRRCFG_FILE)
-
     config_map = OrderedDict(
         {
             "general_config": "! FRR General Config\n",

@@ -339,27 +340,55 @@
     else:
         mode = "w"
 
-    try:
-        frr_cfg_fd = open(fname, mode)
-        if config_type:
-            frr_cfg_fd.write(config_map[config_type])
-        for line in data:
-            frr_cfg_fd.write("{} \n".format(str(line)))
-        frr_cfg_fd.write("\n")
-
-    except IOError as err:
-        logger.error(
-            "Unable to open FRR Config File. error(%s): %s" % (err.errno, err.strerror)
-        )
-        return False
-    finally:
-        frr_cfg_fd.close()
+    routers = config_dict.keys()
+    for router in routers:
+        fname = "{}/{}/{}".format(TMPDIR, router, FRRCFG_FILE)
+        try:
+            frr_cfg_fd = open(fname, mode)
+            if config_type:
+                frr_cfg_fd.write(config_map[config_type])
+            for line in config_dict[router]:
+                frr_cfg_fd.write("{} \n".format(str(line)))
+            frr_cfg_fd.write("\n")
+
+        except IOError as err:
+            logger.error(
+                "Unable to open FRR Config '%s': %s" % (fname, str(err))
+            )
+            return False
+        finally:
+            frr_cfg_fd.close()
 
     # If configuration applied from build, it will done at last
+    result = True
     if not build and load_config:
-        load_config_to_router(tgen, router)
+        result = load_config_to_routers(tgen, routers)
 
-    return True
+    return result
+
+
+def create_common_configuration(
+    tgen, router, data, config_type=None, build=False, load_config=True
+):
+    """
+    API to create object of class FRRConfig and also create frr_json.conf
+    file. It will create interface and common configurations and save it to
+    frr_json.conf and load to router
+
+    Parameters
+    ----------
+    * `tgen`: tgen object
+    * `data`: Configuration data saved in a list.
+    * `router` : router id to be configured.
+    * `config_type` : Syntactic information while writing configuration. Should
+          be one of the value as mentioned in the config_map below.
+    * `build` : Only for initial setup phase this is set as True
+
+    Returns
+    -------
+    True or False
+    """
+    return create_common_configurations(
+        tgen, {router: data}, config_type, build, load_config
+    )
 
 
 def kill_router_daemons(tgen, router, daemons, save_config=True):
|
|||||||
'\nvtysh config apply => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
'\nvtysh config apply => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
||||||
)
|
)
|
||||||
else:
|
else:
|
||||||
router_list[rname].logger.error(
|
router_list[rname].logger.warning(
|
||||||
'\nvtysh config apply => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
'\nvtysh config apply failed => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
||||||
)
|
)
|
||||||
logger.error("Delta file apply for %s failed %d: %s", rname, p.returncode, output)
|
logger.error("Delta file apply for %s failed %d: %s", rname, p.returncode, output)
|
||||||
|
|
||||||
@ -570,16 +599,114 @@ def reset_config_on_routers(tgen, routerName=None):
|
|||||||
for rname, p in procs.items():
|
for rname, p in procs.items():
|
||||||
output, _ = p.communicate()
|
output, _ = p.communicate()
|
||||||
if p.returncode:
|
if p.returncode:
|
||||||
logger.warning(
|
logger.warning("Get running config for %s failed %d: %s", rname, p.returncode, output)
|
||||||
"Get running config for %s failed %d: %s", rname, p.returncode, output
|
|
||||||
)
|
|
||||||
else:
|
else:
|
||||||
logger.info("Configuration on router {} after reset:\n{}".format(rname, output))
|
logger.info("Configuration on router %s after reset:\n%s", rname, output)
|
||||||
|
|
||||||
logger.debug("Exiting API: reset_config_on_routers")
|
logger.debug("Exiting API: reset_config_on_routers")
|
||||||
return True
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def load_config_to_routers(tgen, routers, save_bkup=False):
|
||||||
|
"""
|
||||||
|
Loads configuration on routers from the file FRRCFG_FILE.
|
||||||
|
|
||||||
|
Parameters
|
||||||
|
----------
|
||||||
|
* `tgen` : Topogen object
|
||||||
|
* `routers` : routers for which configuration is to be loaded
|
||||||
|
* `save_bkup` : If True, Saves snapshot of FRRCFG_FILE to FRRCFG_BKUP_FILE
|
||||||
|
Returns
|
||||||
|
-------
|
||||||
|
True or False
|
||||||
|
"""
|
||||||
|
|
||||||
|
logger.debug("Entering API: load_config_to_routers")
|
||||||
|
|
||||||
|
base_router_list = tgen.routers()
|
||||||
|
router_list = {}
|
||||||
|
for router in routers:
|
||||||
|
if (router not in ROUTER_LIST) or (router not in base_router_list):
|
||||||
|
continue
|
||||||
|
router_list[router] = base_router_list[router]
|
||||||
|
|
||||||
|
frr_cfg_file_fmt = TMPDIR + "/{}/" + FRRCFG_FILE
|
||||||
|
frr_cfg_bkup_fmt = TMPDIR + "/{}/" + FRRCFG_BKUP_FILE
|
||||||
|
|
||||||
|
procs = {}
|
||||||
|
for rname in router_list:
|
||||||
|
router = router_list[rname]
|
||||||
|
try:
|
||||||
|
frr_cfg_file = frr_cfg_file_fmt.format(rname)
|
||||||
|
frr_cfg_bkup = frr_cfg_bkup_fmt.format(rname)
|
||||||
|
with open(frr_cfg_file, "r+") as cfg:
|
||||||
|
data = cfg.read()
|
||||||
|
logger.info(
|
||||||
|
"Applying following configuration on router"
|
||||||
|
" {}:\n{}".format(rname, data)
|
||||||
|
)
|
||||||
|
if save_bkup:
|
||||||
|
with open(frr_cfg_bkup, "w") as bkup:
|
||||||
|
bkup.write(data)
|
||||||
|
procs[rname] = router_list[rname].popen(
|
||||||
|
["/usr/bin/env", "vtysh", "-f", frr_cfg_file],
|
||||||
|
stdin=None,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
)
|
||||||
|
except IOError as err:
|
||||||
|
logging.error(
|
||||||
|
"Unable to open config File. error(%s): %s",
|
||||||
|
err.errno, err.strerror
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
except Exception as error:
|
||||||
|
logging.error("Unable to apply config on %s: %s", rname, str(error))
|
||||||
|
return False
|
||||||
|
|
||||||
|
errors = []
|
||||||
|
for rname, p in procs.items():
|
||||||
|
output, _ = p.communicate()
|
||||||
|
frr_cfg_file = frr_cfg_file_fmt.format(rname)
|
||||||
|
vtysh_command = "vtysh -f " + frr_cfg_file
|
||||||
|
if not p.returncode:
|
||||||
|
router_list[rname].logger.info(
|
||||||
|
'\nvtysh config apply => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
router_list[rname].logger.error(
|
||||||
|
'\nvtysh config apply failed => "{}"\nvtysh output <= "{}"'.format(vtysh_command, output)
|
||||||
|
)
|
||||||
|
logger.error("Config apply for %s failed %d: %s", rname, p.returncode, output)
|
||||||
|
# We can't thorw an exception here as we won't clear the config file.
|
||||||
|
errors.append(InvalidCLIError("load_config_to_routers error for {}: {}".format(rname, output)))
|
||||||
|
|
||||||
|
# Empty the config file or we append to it next time through.
|
||||||
|
with open(frr_cfg_file, "r+") as cfg:
|
||||||
|
cfg.truncate(0)
|
||||||
|
|
||||||
|
# Router current configuration to log file or console if
|
||||||
|
# "show_router_config" is defined in "pytest.ini"
|
||||||
|
if show_router_config:
|
||||||
|
procs = {}
|
||||||
|
for rname in router_list:
|
||||||
|
procs[rname] = router_list[rname].popen(
|
||||||
|
["/usr/bin/env", "vtysh", "-c", "show running-config no-header"],
|
||||||
|
stdin=None,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.STDOUT,
|
||||||
|
)
|
||||||
|
for rname, p in procs.items():
|
||||||
|
output, _ = p.communicate()
|
||||||
|
if p.returncode:
|
||||||
|
logger.warning("Get running config for %s failed %d: %s", rname, p.returncode, output)
|
||||||
|
else:
|
||||||
|
logger.info("New configuration for router %s:\n%s", rname,output)
|
||||||
|
|
||||||
|
logger.debug("Exiting API: load_config_to_routers")
|
||||||
|
return not errors
|
||||||
|
|
||||||
|
|
||||||
def load_config_to_router(tgen, routerName, save_bkup=False):
|
def load_config_to_router(tgen, routerName, save_bkup=False):
|
||||||
"""
|
"""
|
||||||
Loads configuration on router from the file FRRCFG_FILE.
|
Loads configuration on router from the file FRRCFG_FILE.
|
||||||
@ -590,52 +717,7 @@ def load_config_to_router(tgen, routerName, save_bkup=False):
|
|||||||
* `routerName` : router for which configuration to be loaded
|
* `routerName` : router for which configuration to be loaded
|
||||||
* `save_bkup` : If True, Saves snapshot of FRRCFG_FILE to FRRCFG_BKUP_FILE
|
* `save_bkup` : If True, Saves snapshot of FRRCFG_FILE to FRRCFG_BKUP_FILE
|
||||||
"""
|
"""
|
||||||
|
return load_config_to_routers(tgen, [routerName], save_bkup)
|
||||||
logger.debug("Entering API: load_config_to_router")
|
|
||||||
|
|
||||||
router_list = tgen.routers()
|
|
||||||
for rname in ROUTER_LIST:
|
|
||||||
if routerName and rname != routerName:
|
|
||||||
continue
|
|
||||||
|
|
||||||
router = router_list[rname]
|
|
||||||
try:
|
|
||||||
frr_cfg_file = "{}/{}/{}".format(TMPDIR, rname, FRRCFG_FILE)
|
|
||||||
frr_cfg_bkup = "{}/{}/{}".format(TMPDIR, rname, FRRCFG_BKUP_FILE)
|
|
||||||
with open(frr_cfg_file, "r+") as cfg:
|
|
||||||
data = cfg.read()
|
|
||||||
logger.info(
|
|
||||||
"Applying following configuration on router"
|
|
||||||
" {}:\n{}".format(rname, data)
|
|
||||||
)
|
|
||||||
if save_bkup:
|
|
||||||
with open(frr_cfg_bkup, "w") as bkup:
|
|
||||||
bkup.write(data)
|
|
||||||
|
|
||||||
output = router.vtysh_multicmd(data, pretty_output=False)
|
|
||||||
for out_err in ERROR_LIST:
|
|
||||||
if out_err.lower() in output.lower():
|
|
||||||
raise InvalidCLIError("%s" % output)
|
|
||||||
|
|
||||||
cfg.truncate(0)
|
|
||||||
|
|
||||||
except IOError as err:
|
|
||||||
errormsg = (
|
|
||||||
"Unable to open config File. error(%s):" " %s",
|
|
||||||
(err.errno, err.strerror),
|
|
||||||
)
|
|
||||||
return errormsg
|
|
||||||
|
|
||||||
# Router current configuration to log file or console if
|
|
||||||
# "show_router_config" is defined in "pytest.ini"
|
|
||||||
if show_router_config:
|
|
||||||
logger.info("New configuration for router {}:".format(rname))
|
|
||||||
new_config = router.run("vtysh -c 'show running'")
|
|
||||||
logger.info(new_config)
|
|
||||||
|
|
||||||
logger.debug("Exiting API: load_config_to_router")
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
def get_frr_ipv6_linklocal(tgen, router, intf=None, vrf=None):
|
def get_frr_ipv6_linklocal(tgen, router, intf=None, vrf=None):
|
||||||
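
load_config_to_routers() is where the parallelism actually happens: it starts vtysh -f <config> for every requested router before waiting on any of them, logs and collects per-router failures rather than raising on the first one, and truncates each config file afterwards so later calls do not re-apply stale commands. The core of that structure, trimmed down:

    procs = {}
    for rname in router_list:
        procs[rname] = router_list[rname].popen(
            ["/usr/bin/env", "vtysh", "-f", frr_cfg_file_fmt.format(rname)],
            stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
        )

    errors = []
    for rname, p in procs.items():
        output, _ = p.communicate()
        if p.returncode:
            errors.append(InvalidCLIError(
                "load_config_to_routers error for {}: {}".format(rname, output)))

    return not errors  # True only if every router applied its config cleanly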
@@ -1174,6 +1256,8 @@ def create_debug_log_config(tgen, input_dict, build=False):
 
     result = False
     try:
+        debug_config_dict = {}
+
         for router in input_dict.keys():
             debug_config = []
             if "debug" in input_dict[router]:

@@ -1204,10 +1288,12 @@ def create_debug_log_config(tgen, input_dict, build=False):
                 for daemon, debug_logs in disable_logs.items():
                     for debug_log in debug_logs:
                         debug_config.append("no {}".format(debug_log))
+            if debug_config:
+                debug_config_dict[router] = debug_config
 
-        result = create_common_configuration(
-            tgen, router, debug_config, "debug_log_config", build=build
+        result = create_common_configurations(
+            tgen, debug_config_dict, "debug_log_config", build=build
         )
     except InvalidCLIError:
         # Traceback
         errormsg = traceback.format_exc()

@@ -1285,11 +1371,14 @@ def create_vrf_cfg(tgen, topo, input_dict=None, build=False):
     input_dict = deepcopy(input_dict)
 
     try:
+        config_data_dict = {}
+
         for c_router, c_data in input_dict.items():
             rnode = tgen.routers()[c_router]
+            config_data = []
+
             if "vrfs" in c_data:
                 for vrf in c_data["vrfs"]:
-                    config_data = []
                     del_action = vrf.setdefault("delete", False)
                     name = vrf.setdefault("name", None)
                     table_id = vrf.setdefault("id", None)

@@ -1366,9 +1455,12 @@ def create_vrf_cfg(tgen, topo, input_dict=None, build=False):
                         cmd = "no vni {}".format(del_vni)
                         config_data.append(cmd)
 
-            result = create_common_configuration(
-                tgen, c_router, config_data, "vrf", build=build
-            )
+            if config_data:
+                config_data_dict[c_router] = config_data
+
+        result = create_common_configurations(
+            tgen, config_data_dict, "vrf", build=build
+        )
 
     except InvalidCLIError:
         # Traceback

@@ -1638,7 +1730,8 @@ def interface_status(tgen, topo, input_dict):
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
 
     try:
-        global frr_cfg
+        rlist = []
+
         for router in input_dict.keys():
 
             interface_list = input_dict[router]["interface_list"]

@@ -1647,8 +1740,10 @@ def interface_status(tgen, topo, input_dict):
             rnode = tgen.routers()[router]
             interface_set_status(rnode, intf, status)
 
-            # Load config to router
-            load_config_to_router(tgen, router)
+            rlist.append(router)
+
+        # Load config to routers
+        load_config_to_routers(tgen, rlist)
 
     except Exception as e:
         errormsg = traceback.format_exc()

@@ -1837,6 +1932,8 @@ def create_interfaces_cfg(tgen, topo, build=False):
     topo = deepcopy(topo)
 
     try:
+        interface_data_dict = {}
+
         for c_router, c_data in topo.items():
             interface_data = []
             for destRouterLink, data in sorted(c_data["links"].items()):

@@ -1903,10 +2000,12 @@ def create_interfaces_cfg(tgen, topo, build=False):
                 interface_data += _create_interfaces_ospf_cfg(
                     "ospf6", c_data, data, ospf_keywords + ["area"]
                 )
+            if interface_data:
+                interface_data_dict[c_router] = interface_data
 
-        result = create_common_configuration(
-            tgen, c_router, interface_data, "interface_config", build=build
+        result = create_common_configurations(
+            tgen, interface_data_dict, "interface_config", build=build
         )
 
     except InvalidCLIError:
         # Traceback

@@ -1965,6 +2064,8 @@ def create_static_routes(tgen, input_dict, build=False):
     input_dict = deepcopy(input_dict)
 
     try:
+        static_routes_list_dict = {}
+
         for router in input_dict.keys():
             if "static_routes" not in input_dict[router]:
                 errormsg = "static_routes not present in input_dict"

@@ -2020,9 +2121,12 @@ def create_static_routes(tgen, input_dict, build=False):
 
                 static_routes_list.append(cmd)
 
-            result = create_common_configuration(
-                tgen, router, static_routes_list, "static_route", build=build
-            )
+            if static_routes_list:
+                static_routes_list_dict[router] = static_routes_list
+
+        result = create_common_configurations(
+            tgen, static_routes_list_dict, "static_route", build=build
+        )
 
     except InvalidCLIError:
         # Traceback

@@ -2079,6 +2183,8 @@ def create_prefix_lists(tgen, input_dict, build=False):
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
     result = False
     try:
+        config_data_dict = {}
+
         for router in input_dict.keys():
             if "prefix_lists" not in input_dict[router]:
                 errormsg = "prefix_lists not present in input_dict"

@@ -2125,9 +2231,12 @@ def create_prefix_lists(tgen, input_dict, build=False):
                         cmd = "no {}".format(cmd)
 
                     config_data.append(cmd)
-            result = create_common_configuration(
-                tgen, router, config_data, "prefix_list", build=build
-            )
+            if config_data:
+                config_data_dict[router] = config_data
+
+        result = create_common_configurations(
+            tgen, config_data_dict, "prefix_list", build=build
+        )
 
     except InvalidCLIError:
         # Traceback

@@ -2223,6 +2332,8 @@ def create_route_maps(tgen, input_dict, build=False):
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
     input_dict = deepcopy(input_dict)
     try:
+        rmap_data_dict = {}
+
         for router in input_dict.keys():
             if "route_maps" not in input_dict[router]:
                 logger.debug("route_maps not present in input_dict")

@@ -2500,9 +2611,12 @@ def create_route_maps(tgen, input_dict, build=False):
                             cmd = "match metric {}".format(metric)
                             rmap_data.append(cmd)
 
-            result = create_common_configuration(
-                tgen, router, rmap_data, "route_maps", build=build
-            )
+            if rmap_data:
+                rmap_data_dict[router] = rmap_data
+
+        result = create_common_configurations(
+            tgen, rmap_data_dict, "route_maps", build=build
+        )
 
     except InvalidCLIError:
         # Traceback

@@ -2577,6 +2691,8 @@ def create_bgp_community_lists(tgen, input_dict, build=False):
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
     input_dict = deepcopy(input_dict)
     try:
+        config_data_dict = {}
+
         for router in input_dict.keys():
             if "bgp_community_lists" not in input_dict[router]:
                 errormsg = "bgp_community_lists not present in input_dict"

@@ -2613,9 +2729,12 @@ def create_bgp_community_lists(tgen, input_dict, build=False):
 
             config_data.append(cmd)
 
-        result = create_common_configuration(
-            tgen, router, config_data, "bgp_community_list", build=build
-        )
+            if config_data:
+                config_data_dict[router] = config_data
+
+        result = create_common_configurations(
+            tgen, config_data_dict, "bgp_community_list", build=build
+        )
 
     except InvalidCLIError:
         # Traceback
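
Every helper in this file (debug logs, VRFs, interfaces, static routes, prefix lists, route maps, community lists) gets the same mechanical conversion: build the per-router command list inside the loop, stash it in a dict, and make one batched call after the loop. In outline (build_commands_for() is a hypothetical stand-in for each helper's existing body, and the config type string varies per helper):

    config_data_dict = {}
    for router in input_dict.keys():
        config_data = build_commands_for(router)  # hypothetical placeholder
        if config_data:
            config_data_dict[router] = config_data

    result = create_common_configurations(
        tgen, config_data_dict, "static_route", build=build
    )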
@@ -18,7 +18,6 @@
 # OF THIS SOFTWARE.
 #
 
-import traceback
 import ipaddr
 import ipaddress
 import sys

@@ -32,7 +31,7 @@ import sys
 
 # Import common_config to use commomnly used APIs
 from lib.common_config import (
-    create_common_configuration,
+    create_common_configurations,
     InvalidCLIError,
     retry,
     generate_ips,

@@ -86,32 +85,36 @@ def create_router_ospf(tgen, topo, input_dict=None, build=False, load_config=Tru
     topo = topo["routers"]
     input_dict = deepcopy(input_dict)
 
-    for router in input_dict.keys():
-        if "ospf" not in input_dict[router]:
-            logger.debug("Router %s: 'ospf' not present in input_dict", router)
-            continue
-
-        result = __create_ospf_global(tgen, input_dict, router, build, load_config)
-        if result is True:
-            ospf_data = input_dict[router]["ospf"]
-
-    for router in input_dict.keys():
-        if "ospf6" not in input_dict[router]:
-            logger.debug("Router %s: 'ospf6' not present in input_dict", router)
-            continue
-
-        result = __create_ospf_global(
-            tgen, input_dict, router, build, load_config, ospf="ospf6"
-        )
-        if result is True:
-            ospf_data = input_dict[router]["ospf6"]
+    for ospf in ["ospf", "ospf6"]:
+        config_data_dict = {}
+
+        for router in input_dict.keys():
+            if ospf not in input_dict[router]:
+                logger.debug("Router %s: %s not present in input_dict", router, ospf)
+                continue
+
+            config_data = __create_ospf_global(
+                tgen, input_dict, router, build, load_config, ospf
+            )
+            if config_data:
+                if router not in config_data_dict:
+                    config_data_dict[router] = config_data
+                else:
+                    config_data_dict[router].extend(config_data)
+        try:
+            result = create_common_configurations(
+                tgen, config_data_dict, ospf, build, load_config
+            )
+        except InvalidCLIError:
+            logger.error("create_router_ospf (ipv4)", exc_info=True)
+            result = False
 
     logger.debug("Exiting lib API: create_router_ospf()")
     return result
 
 
 def __create_ospf_global(
-    tgen, input_dict, router, build=False, load_config=True, ospf="ospf"
+    tgen, input_dict, router, build, load_config, ospf
 ):
     """
     Helper API to create ospf global configuration.

@@ -133,12 +136,12 @@ def __create_ospf_global(
             "links": {
                 "r3": {
                     "ipv6": "2013:13::1/64",
                     "ospf6": {
                         "hello_interval": 1,
                         "dead_interval": 4,
                         "network": "point-to-point"
                     }
                 }
             },
             "ospf6": {
                 "router_id": "1.1.1.1",
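
create_router_ospf() now walks both address families in one pass, merging each router's ospf and ospf6 commands into a single per-router list before the batched load. An illustrative call from a test (input shape follows the docstring example above; the router id is arbitrary):

    input_dict = {
        "r1": {
            "ospf": {"router_id": "1.1.1.1"},
            "ospf6": {"router_id": "1.1.1.1"},
        }
    }
    result = create_router_ospf(tgen, topo, input_dict)
    assert result is True, "OSPF config failed: {}".format(result)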
@@ -153,229 +156,221 @@
 
     Returns
     -------
-    True or False
+    list of configuration commands
     """
 
-    result = False
+    config_data = []
+
+    if ospf not in input_dict[router]:
+        return config_data
 
     logger.debug("Entering lib API: __create_ospf_global()")
-    try:
 
     ospf_data = input_dict[router][ospf]
     del_ospf_action = ospf_data.setdefault("delete", False)
     if del_ospf_action:
         config_data = ["no router {}".format(ospf)]
-        result = create_common_configuration(
-            tgen, router, config_data, ospf, build, load_config
-        )
-        return result
+        return config_data
 
-    config_data = []
     cmd = "router {}".format(ospf)
 
     config_data.append(cmd)
 
     # router id
     router_id = ospf_data.setdefault("router_id", None)
     del_router_id = ospf_data.setdefault("del_router_id", False)
     if del_router_id:
         config_data.append("no {} router-id".format(ospf))
     if router_id:
         config_data.append("{} router-id {}".format(ospf, router_id))
 
     # log-adjacency-changes
     log_adj_changes = ospf_data.setdefault("log_adj_changes", None)
     del_log_adj_changes = ospf_data.setdefault("del_log_adj_changes", False)
     if del_log_adj_changes:
         config_data.append("no log-adjacency-changes detail")
     if log_adj_changes:
         config_data.append("log-adjacency-changes {}".format(
             log_adj_changes))
 
     # aggregation timer
     aggr_timer = ospf_data.setdefault("aggr_timer", None)
     del_aggr_timer = ospf_data.setdefault("del_aggr_timer", False)
     if del_aggr_timer:
         config_data.append("no aggregation timer")
     if aggr_timer:
         config_data.append("aggregation timer {}".format(
             aggr_timer))
 
     # maximum path information
     ecmp_data = ospf_data.setdefault("maximum-paths", {})
     if ecmp_data:
         cmd = "maximum-paths {}".format(ecmp_data)
         del_action = ospf_data.setdefault("del_max_path", False)
         if del_action:
             cmd = "no maximum-paths"
         config_data.append(cmd)
 
     # redistribute command
     redistribute_data = ospf_data.setdefault("redistribute", {})
     if redistribute_data:
         for redistribute in redistribute_data:
             if "redist_type" not in redistribute:
                 logger.debug(
                     "Router %s: 'redist_type' not present in " "input_dict", router
                 )
             else:
                 cmd = "redistribute {}".format(redistribute["redist_type"])
                 for red_type in redistribute_data:
                     if "route_map" in red_type:
                         cmd = cmd + " route-map {}".format(red_type["route_map"])
                 del_action = redistribute.setdefault("delete", False)
                 if del_action:
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)
 
     # area information
     area_data = ospf_data.setdefault("area", {})
     if area_data:
         for area in area_data:
             if "id" not in area:
                 logger.debug(
                     "Router %s: 'area id' not present in " "input_dict", router
                 )
             else:
                 cmd = "area {}".format(area["id"])
 
                 if "type" in area:
                     cmd = cmd + " {}".format(area["type"])
 
                 del_action = area.setdefault("delete", False)
                 if del_action:
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)
 
     #def route information
     def_rte_data = ospf_data.setdefault("default-information", {})
     if def_rte_data:
         if "originate" not in def_rte_data:
             logger.debug("Router %s: 'originate key' not present in "
                          "input_dict", router)
         else:
             cmd = "default-information originate"
 
             if "always" in def_rte_data:
                 cmd = cmd + " always"
 
             if "metric" in def_rte_data:
                 cmd = cmd + " metric {}".format(def_rte_data["metric"])
 
             if "metric-type" in def_rte_data:
                 cmd = cmd + " metric-type {}".format(def_rte_data[
                     "metric-type"])
 
             if "route-map" in def_rte_data:
                 cmd = cmd + " route-map {}".format(def_rte_data["route-map"])
 
             del_action = def_rte_data.setdefault("delete", False)
             if del_action:
                 cmd = "no {}".format(cmd)
             config_data.append(cmd)
 
     # area interface information for ospf6d only
     if ospf == "ospf6":
         area_iface = ospf_data.setdefault("neighbors", {})
         if area_iface:
             for neighbor in area_iface:
                 if "area" in area_iface[neighbor]:
                     iface = input_dict[router]["links"][neighbor]["interface"]
                     cmd = "interface {} area {}".format(
                         iface, area_iface[neighbor]["area"]
                     )
                     if area_iface[neighbor].setdefault("delete", False):
                         cmd = "no {}".format(cmd)
                     config_data.append(cmd)
 
-                try:
-                    if "area" in input_dict[router]["links"][neighbor]["ospf6"]:
-                        iface = input_dict[router]["links"][neighbor]["interface"]
-                        cmd = "interface {} area {}".format(
-                            iface,
-                            input_dict[router]["links"][neighbor]["ospf6"]["area"],
-                        )
-                        if input_dict[router]["links"][neighbor].setdefault(
-                            "delete", False
-                        ):
-                            cmd = "no {}".format(cmd)
-                            config_data.append(cmd)
-                except KeyError:
-                    pass
+            try:
+                if "area" in input_dict[router]['links'][neighbor][
+                        'ospf6']:
+                    iface = input_dict[router]["links"][neighbor]["interface"]
+                    cmd = "interface {} area {}".format(
+                        iface, input_dict[router]['links'][neighbor][
+                            'ospf6']['area'])
+                    if input_dict[router]['links'][neighbor].setdefault(
+                            "delete", False):
+                        cmd = "no {}".format(cmd)
+                    config_data.append(cmd)
+            except KeyError:
+                pass
 
     # summary information
     summary_data = ospf_data.setdefault("summary-address", {})
     if summary_data:
         for summary in summary_data:
             if "prefix" not in summary:
                 logger.debug(
                     "Router %s: 'summary-address' not present in " "input_dict",
                     router,
                 )
             else:
                 cmd = "summary {}/{}".format(summary["prefix"], summary["mask"])
 
                 _tag = summary.setdefault("tag", None)
                 if _tag:
                     cmd = "{} tag {}".format(cmd, _tag)
 
                 _advertise = summary.setdefault("advertise", True)
                 if not _advertise:
                     cmd = "{} no-advertise".format(cmd)
 
                 del_action = summary.setdefault("delete", False)
                 if del_action:
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)
 
     # ospf gr information
     gr_data = ospf_data.setdefault("graceful-restart", {})
     if gr_data:
 
         if "opaque" in gr_data and gr_data["opaque"]:
             cmd = "capability opaque"
             if gr_data.setdefault("delete", False):
                 cmd = "no {}".format(cmd)
             config_data.append(cmd)
 
         if "helper-only" in gr_data and not gr_data["helper-only"]:
             cmd = "graceful-restart helper-only"
             if gr_data.setdefault("delete", False):
                 cmd = "no {}".format(cmd)
             config_data.append(cmd)
         elif "helper-only" in gr_data and type(gr_data["helper-only"]) is list:
             for rtrs in gr_data["helper-only"]:
                 cmd = "graceful-restart helper-only {}".format(rtrs)
                 if gr_data.setdefault("delete", False):
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)
 
         if "helper" in gr_data:
             if type(gr_data["helper"]) is not list:
                 gr_data["helper"] = list(gr_data["helper"])
             for helper_role in gr_data["helper"]:
                 cmd = "graceful-restart helper {}".format(helper_role)
                 if gr_data.setdefault("delete", False):
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)
 
         if "supported-grace-time" in gr_data:
             cmd = "graceful-restart helper supported-grace-time {}".format(
                 gr_data["supported-grace-time"]
             )
             if gr_data.setdefault("delete", False):
                 cmd = "no {}".format(cmd)
             config_data.append(cmd)
 
-        result = create_common_configuration(
-            tgen, router, config_data, "ospf", build, load_config
-        )
-
-    except InvalidCLIError:
-        # Traceback
-        errormsg = traceback.format_exc()
-        logger.error(errormsg)
-        return errormsg
-
     logger.debug("Exiting lib API: create_ospf_global()")
-    return result
+
+    return config_data
 
 
 def create_router_ospf6(tgen, topo, input_dict=None, build=False, load_config=True):
@@ -410,14 +405,27 @@ def create_router_ospf6(tgen, topo, input_dict=None, build=False, load_config=Tr
     else:
         topo = topo["routers"]
     input_dict = deepcopy(input_dict)
 
+    config_data_dict = {}
+
     for router in input_dict.keys():
         if "ospf6" not in input_dict[router]:
             logger.debug("Router %s: 'ospf6' not present in input_dict", router)
             continue
 
-        result = __create_ospf_global(
+        config_data = __create_ospf_global(
             tgen, input_dict, router, build, load_config, "ospf6"
         )
+        if config_data:
+            config_data_dict[router] = config_data
+
+    try:
+        result = create_common_configurations(
+            tgen, config_data_dict, "ospf6", build, load_config
+        )
+    except InvalidCLIError:
+        logger.error("create_router_ospf6", exc_info=True)
+        result = False
 
     logger.debug("Exiting lib API: create_router_ospf6()")
     return result

@@ -462,6 +470,9 @@ def config_ospf_interface(tgen, topo, input_dict=None, build=False, load_config=
         input_dict = deepcopy(topo)
     else:
         input_dict = deepcopy(input_dict)
 
+    config_data_dict = {}
+
     for router in input_dict.keys():
         config_data = []
         for lnk in input_dict[router]["links"].keys():

@@ -546,10 +557,14 @@ def config_ospf_interface(tgen, topo, input_dict=None, build=False, load_config=
 
         if build:
             return config_data
-        else:
-            result = create_common_configuration(
-                tgen, router, config_data, "interface_config", build=build
-            )
+
+        if config_data:
+            config_data_dict[router] = config_data
+
+    result = create_common_configurations(
+        tgen, config_data_dict, "interface_config", build=build
+    )
 
     logger.debug("Exiting lib API: config_ospf_interface()")
     return result
 

@@ -2339,6 +2354,9 @@ def config_ospf6_interface(tgen, topo, input_dict=None, build=False, load_config
         input_dict = deepcopy(topo)
     else:
         input_dict = deepcopy(input_dict)
 
+    config_data_dict = {}
+
     for router in input_dict.keys():
         config_data = []
         for lnk in input_dict[router]['links'].keys():

@@ -2409,10 +2427,14 @@ def config_ospf6_interface(tgen, topo, input_dict=None, build=False, load_config
 
         if build:
             return config_data
-        else:
-            result = create_common_configuration(
-                tgen, router, config_data, "interface_config", build=build
-            )
+
+        if config_data:
+            config_data_dict[router] = config_data
+
+    result = create_common_configurations(
+        tgen, config_data_dict, "interface_config", build=build
+    )
 
     logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
     return result
 
@@ -29,6 +29,7 @@ from lib.topolog import logger
 # Import common_config to use commomnly used APIs
 from lib.common_config import (
     create_common_configuration,
+    create_common_configurations,
     InvalidCLIError,
     retry,
     run_frr_cmd,
@@ -79,28 +80,38 @@ def create_pim_config(tgen, topo, input_dict=None, build=False, load_config=True
     else:
         topo = topo["routers"]
         input_dict = deepcopy(input_dict)

+    config_data_dict = {}
+
     for router in input_dict.keys():
-        result = _enable_disable_pim(tgen, topo, input_dict, router, build)
+        config_data = _enable_disable_pim_config(tgen, topo, input_dict, router, build)

+        if config_data:
+            config_data_dict[router] = config_data
+
+    # Now add RP config to all routers
+    for router in input_dict.keys():
         if "pim" not in input_dict[router]:
-            logger.debug("Router %s: 'pim' is not present in " "input_dict", router)
             continue
-        if result is True:
-            if "rp" not in input_dict[router]["pim"]:
-                continue
-
-            result = _create_pim_rp_config(
-                tgen, topo, input_dict, router, build, load_config
-            )
-            if result is not True:
-                return False
+        if "rp" not in input_dict[router]["pim"]:
+            continue
+        _add_pim_rp_config(
+            tgen, topo, input_dict, router, build, config_data_dict
+        )
+
+    try:
+        result = create_common_configurations(
+            tgen, config_data_dict, "pim", build, load_config
+        )
+    except InvalidCLIError:
+        logger.error("create_pim_config", exc_info=True)
+        result = False

     logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
     return result


-def _create_pim_rp_config(tgen, topo, input_dict, router, build=False, load_config=False):
+def _add_pim_rp_config(tgen, topo, input_dict, router, build, config_data_dict):
     """
     Helper API to create pim RP configurations.

@@ -111,13 +122,12 @@ def _create_pim_rp_config(tgen, topo, input_dict, router, build=False, load_conf
     * `input_dict` : Input dict data, required when configuring from testcase
     * `router` : router id to be configured.
     * `build` : Only for initial setup phase this is set as True.
+    * `config_data_dict` : OUT: adds `router` config to dictinary
     Returns
     -------
-    True or False
+    None
     """

-    result = False
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

     pim_data = input_dict[router]["pim"]
@@ -125,7 +135,6 @@ def _create_pim_rp_config(tgen, topo, input_dict, router, build=False, load_conf
-
     # Configure this RP on every router.
     for dut in tgen.routers():

         # At least one interface must be enabled for PIM on the router
         pim_if_enabled = False
         for destLink, data in topo[dut]["links"].items():
@@ -193,22 +202,11 @@ def _create_pim_rp_config(tgen, topo, input_dict, router, build=False, load_conf
                     cmd = "no {}".format(cmd)
                 config_data.append(cmd)

-    try:
-        result = create_common_configuration(
-            tgen, dut, config_data, "pim", build, load_config
-        )
-        if result is not True:
-            logger.error("Error applying PIM config", exc_info=True)
-            logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
-            return False
-
-    except InvalidCLIError as error:
-        logger.error("Error applying PIM config: %s", error, exc_info=error)
-        logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
-        return False
-
-    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
-    return result
+        if config_data:
+            if dut not in config_data_dict:
+                config_data_dict[dut] = config_data
+            else:
+                config_data_dict[dut].extend(config_data)


 def create_igmp_config(tgen, topo, input_dict=None, build=False):
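
In _add_pim_rp_config above, each dut's RP commands are merged into whatever has already been collected for that router instead of being applied on the spot. The accumulate-or-extend step in isolation (plain Python, no FRR dependencies) behaves like this:

    def add_config(config_data_dict, dut, config_data):
        # Store this dut's lines, extending an existing entry when one is present.
        if config_data:
            if dut not in config_data_dict:
                config_data_dict[dut] = config_data
            else:
                config_data_dict[dut].extend(config_data)

    collected = {}
    add_config(collected, "r1", ["ip pim rp 1.1.1.1 224.0.0.0/4"])
    add_config(collected, "r1", ["ip pim join-prune-interval 60"])
    assert collected["r1"] == [
        "ip pim rp 1.1.1.1 224.0.0.0/4",
        "ip pim join-prune-interval 60",
    ]
    # config_data_dict.setdefault(dut, []).extend(config_data) would be an equivalent one-liner.
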
@@ -255,6 +253,9 @@ def create_igmp_config(tgen, topo, input_dict=None, build=False):
     else:
         topo = topo["routers"]
         input_dict = deepcopy(input_dict)
+
+    config_data_dict = {}
+
     for router in input_dict.keys():
         if "igmp" not in input_dict[router]:
             logger.debug("Router %s: 'igmp' is not present in " "input_dict", router)
@@ -300,21 +301,22 @@ def create_igmp_config(tgen, topo, input_dict=None, build=False):
                         cmd = "no {}".format(cmd)

                     config_data.append(cmd)
-        try:
-            result = create_common_configuration(
-                tgen, router, config_data, "interface_config", build=build
-            )
-        except InvalidCLIError:
-            errormsg = traceback.format_exc()
-            logger.error(errormsg)
-            return errormsg
+        if config_data:
+            config_data_dict[router] = config_data
+
+    try:
+        result = create_common_configurations(
+            tgen, config_data_dict, "interface_config", build=build
+        )
+    except InvalidCLIError:
+        logger.error("create_igmp_config", exc_info=True)
+        result = False

     logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
     return result


-def _enable_disable_pim(tgen, topo, input_dict, router, build=False):
+def _enable_disable_pim_config(tgen, topo, input_dict, router, build=False):
     """
     Helper API to enable or disable pim on interfaces

@@ -328,57 +330,40 @@ def _enable_disable_pim(tgen, topo, input_dict, router, build=False):

     Returns
     -------
-    True or False
+    list of config
     """
-    result = False
-    logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))
-    try:
-        config_data = []
-
-        # Enable pim on interfaces
-        for destRouterLink, data in sorted(topo[router]["links"].items()):
-            if "pim" in data and data["pim"] == "enable":
-
-                # Loopback interfaces
-                if "type" in data and data["type"] == "loopback":
-                    interface_name = destRouterLink
-                else:
-                    interface_name = data["interface"]
-
-                cmd = "interface {}".format(interface_name)
-                config_data.append(cmd)
-                config_data.append("ip pim")
-
-        result = create_common_configuration(
-            tgen, router, config_data, "interface_config", build=build
-        )
-        if result is not True:
-            return False
-
-        config_data = []
-        if "pim" in input_dict[router]:
-            pim_data = input_dict[router]["pim"]
-            for t in [
-                "join-prune-interval",
-                "keep-alive-timer",
-                "register-suppress-time",
-            ]:
-                if t in pim_data:
-                    cmd = "ip pim {} {}".format(t, pim_data[t])
-                    config_data.append(cmd)
-
-        if config_data:
-            result = create_common_configuration(
-                tgen, router, config_data, "pim", build=build
-            )
-    except InvalidCLIError:
-        # Traceback
-        errormsg = traceback.format_exc()
-        logger.error(errormsg)
-        return errormsg
-
-    logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
-    return result
+
+    config_data = []
+
+    # Enable pim on interfaces
+    for destRouterLink, data in sorted(topo[router]["links"].items()):
+        if "pim" in data and data["pim"] == "enable":
+            # Loopback interfaces
+            if "type" in data and data["type"] == "loopback":
+                interface_name = destRouterLink
+            else:
+                interface_name = data["interface"]
+
+            cmd = "interface {}".format(interface_name)
+            config_data.append(cmd)
+            config_data.append("ip pim")
+
+    # pim global config
+    if "pim" in input_dict[router]:
+        pim_data = input_dict[router]["pim"]
+        del_action = pim_data.setdefault("delete", False)
+        for t in [
+            "join-prune-interval",
+            "keep-alive-timer",
+            "register-suppress-time",
+        ]:
+            if t in pim_data:
+                cmd = "ip pim {} {}".format(t, pim_data[t])
+                if del_action:
+                    cmd = "no {}".format(cmd)
+                config_data.append(cmd)
+
+    return config_data


 def find_rp_details(tgen, topo):
@@ -451,7 +436,9 @@ def configure_pim_force_expire(tgen, topo, input_dict, build=False):

     result = False
     logger.debug("Entering lib API: {}".format(sys._getframe().f_code.co_name))

     try:
+        config_data_dict = {}
+
         for dut in input_dict.keys():
             if "pim" not in input_dict[dut]:
@@ -459,8 +446,8 @@ def configure_pim_force_expire(tgen, topo, input_dict, build=False):

             pim_data = input_dict[dut]["pim"]

+            config_data = []
             if "force_expire" in pim_data:
-                config_data = []
                 force_expire_data = pim_data["force_expire"]

                 for source, groups in force_expire_data.items():
@@ -473,17 +460,15 @@ def configure_pim_force_expire(tgen, topo, input_dict, build=False):
                         )
                         config_data.append(cmd)

-            result = create_common_configuration(
-                tgen, dut, config_data, "pim", build=build
-            )
-            if result is not True:
-                return False
+            if config_data:
+                config_data_dict[dut] = config_data

+        result = create_common_configurations(
+            tgen, config_data_dict, "pim", build=build
+        )
     except InvalidCLIError:
-        # Traceback
-        errormsg = traceback.format_exc()
-        logger.error(errormsg)
-        return errormsg
+        logger.error("configure_pim_force_expire", exc_info=True)
+        result = False

     logger.debug("Exiting lib API: {}".format(sys._getframe().f_code.co_name))
     return result
@@ -963,7 +948,7 @@ def verify_join_state_and_timer(tgen, dut, iif, src_address, group_addresses, ex
     return True


-@retry(retry_timeout=80)
+@retry(retry_timeout=120)
 def verify_ip_mroutes(
     tgen, dut, src_address, group_addresses, iif, oil, return_uptime=False, mwait=0, expected=True
 ):
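
The only change in this hunk raises the verification deadline: verify_ip_mroutes() is now retried for up to 120 seconds rather than 80. As a rough idea of what a timeout-bounded retry decorator can look like (a generic sketch, not the retry() defined in lib.common_config):

    import functools
    import time

    def retry(retry_timeout, interval=2):
        # Re-run the wrapped check until it succeeds or the deadline passes.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                deadline = time.time() + retry_timeout
                while True:
                    result = func(*args, **kwargs)
                    if result is True or time.time() >= deadline:
                        return result
                    time.sleep(interval)
            return wrapper
        return decorator

    @retry(retry_timeout=120)
    def check_mroutes():
        # Stand-in for a verification routine such as verify_ip_mroutes().
        return True

    check_mroutes()
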
@@ -2026,6 +2011,7 @@ def add_rp_interfaces_and_pim_config(tgen, topo, interface, rp, rp_mapping):
             config_data.append("ip address {}".format(_rp))
             config_data.append("ip pim")

+            # Why not config just once, why per group?
             result = create_common_configuration(
                 tgen, rp, config_data, "interface_config"
             )
@@ -34,7 +34,7 @@ from lib.topolog import logger
 from lib.common_config import (
     number_to_row,
     number_to_column,
-    load_config_to_router,
+    load_config_to_routers,
     create_interfaces_cfg,
     create_static_routes,
     create_prefix_lists,
@@ -342,10 +342,8 @@ def build_config_from_json(tgen, topo, save_bkup=True):

         func_dict.get(func_type)(tgen, data, build=True)

-    for router in sorted(topo["routers"].keys()):
-        logger.debug("Configuring router {}...".format(router))
-
-        result = load_config_to_router(tgen, router, save_bkup)
-        if not result:
-            logger.info("Failed while configuring {}".format(router))
-            pytest.exit(1)
+    routers = sorted(topo["routers"].keys())
+    result = load_config_to_routers(tgen, routers, save_bkup)
+    if not result:
+        logger.info("build_config_from_json: failed to configure topology")
+        pytest.exit(1)
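
build_config_from_json() now hands the full router list to load_config_to_routers() in a single call instead of looping and loading one router at a time, which lets the library treat the loads as a batch. One way such a batch could be pushed concurrently (a hypothetical sketch using the standard library, not the actual topotest implementation) is:

    from concurrent.futures import ThreadPoolExecutor

    def load_one(router, config_lines):
        # Stand-in for loading one router's generated configuration.
        print("loading {} lines into {}".format(len(config_lines), router))
        return True

    def load_config_to_routers_sketch(configs):
        # Submit every router's load at once and report overall success.
        with ThreadPoolExecutor(max_workers=max(len(configs), 1)) as pool:
            futures = {r: pool.submit(load_one, r, c) for r, c in configs.items()}
        return all(f.result() for f in futures.values())

    if not load_config_to_routers_sketch({"r1": ["hostname r1"], "r2": ["hostname r2"]}):
        raise SystemExit(1)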