diff --git a/README.md b/README.md
index d4f2780c5..a64831ed6 100644
--- a/README.md
+++ b/README.md
@@ -63,7 +63,7 @@ You can also include it in a `requirements.yml` file and install it with `ansibl
---
collections:
- name: cisco.dcnm
- version: 3.2.0
+ version: 3.3.0
```
## Using this collection
diff --git a/docs/cisco.dcnm.dcnm_inventory_module.rst b/docs/cisco.dcnm.dcnm_inventory_module.rst
index 0aabc184d..9420708eb 100644
--- a/docs/cisco.dcnm.dcnm_inventory_module.rst
+++ b/docs/cisco.dcnm.dcnm_inventory_module.rst
@@ -428,6 +428,11 @@ Parameters
super_spine
border_super_spine
border_gateway_super_spine
+ access
+ aggregation
+ edge_router
+ core_router
+ tor
diff --git a/docs/cisco.dcnm.dcnm_vrf_module.rst b/docs/cisco.dcnm.dcnm_vrf_module.rst
index b09eaa15e..97ef5928c 100644
--- a/docs/cisco.dcnm.dcnm_vrf_module.rst
+++ b/docs/cisco.dcnm.dcnm_vrf_module.rst
@@ -197,6 +197,7 @@ Parameters
string
+ / required
|
@@ -292,7 +293,6 @@ Parameters
string
- / required
|
@@ -1099,15 +1099,20 @@ Examples
- ip_address: 192.168.1.224
- ip_address: 192.168.1.225
vrf_lite:
- # All parameters under vrf_lite except peer_vrf are optional and
- # will be supplied by DCNM when omitted in the playbook
- - peer_vrf: test_vrf_1 # peer_vrf is mandatory
- interface: Ethernet1/16 # optional
+ - peer_vrf: test_vrf_1 # optional
+ interface: Ethernet1/16 # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
neighbor_ipv6: 2010::10:34:0:3 # optional
dot1q: 2 # dot1q can be got from dcnm/optional
+ - peer_vrf: test_vrf_2 # optional
+ interface: Ethernet1/17 # mandatory
+ ipv4_addr: 20.33.0.2/30 # optional
+ neighbor_ipv4: 20.33.0.1 # optional
+ ipv6_addr: 3010::10:34:0:7/64 # optional
+ neighbor_ipv6: 3010::10:34:0:3 # optional
+ dot1q: 3 # dot1q can be got from dcnm/optional
# The two VRFs below will be replaced in the target fabric.
- name: Replace vrfs
diff --git a/galaxy.yml b/galaxy.yml
index 9bca57617..dad67956f 100644
--- a/galaxy.yml
+++ b/galaxy.yml
@@ -1,7 +1,7 @@
---
namespace: cisco
name: dcnm
-version: 3.2.0
+version: 3.3.0
readme: README.md
authors:
- Shrishail Kariyappanavar
@@ -13,5 +13,5 @@ description: Ansible collection for the Cisco Nexus® Dashboard Fabric Controlle
license: Apache-2.0
tags: [cisco, ndfc, dcnm, nxos, networking, vxlan]
dependencies:
- "ansible.netcommon": ">=2.6.1,<=4.1.0"
+ "ansible.netcommon": ">=2.6.1"
repository: https://github.com/CiscoDevNet/ansible-dcnm
diff --git a/plugins/action/dcnm_vrf.py b/plugins/action/dcnm_vrf.py
index 00bb9695f..a8a473049 100644
--- a/plugins/action/dcnm_vrf.py
+++ b/plugins/action/dcnm_vrf.py
@@ -38,6 +38,13 @@ def run(self, tmp=None, task_vars=None):
if "vlan_id" in at:
msg = "Playbook parameter vlan_id should not be specified under the attach: block. Please specify this under the config: block instead" # noqa
return {"failed": True, "msg": msg}
+ if "vrf_lite" in at:
+ try:
+ for vl in at["vrf_lite"]:
+ continue
+ except TypeError:
+ msg = "Please specify interface parameter under vrf_lite section in the playbook"
+ return {"failed": True, "msg": msg}
self.result = super(ActionModule, self).run(task_vars=task_vars)
return self.result
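
Note on the action-plugin guard above: a bare `vrf_lite:` key in playbook YAML parses to `None`, and iterating `None` raises `TypeError`, which is exactly what the new check catches. A minimal standalone sketch of the same idea (function name and data are illustrative, not the plugin's API):

```python
# Minimal sketch of the vrf_lite shape check performed by the action plugin
# above. The attach dicts mimic parsed playbook YAML.
def check_attach_blocks(attach_list):
    for at in attach_list:
        if "vrf_lite" in at:
            try:
                # A bare "vrf_lite:" key parses to None; iterating None raises TypeError.
                for _ in at["vrf_lite"]:
                    pass
            except TypeError:
                return {"failed": True,
                        "msg": "Please specify the interface parameter under the vrf_lite section in the playbook"}
    return {"failed": False}


print(check_attach_blocks([{"ip_address": "192.168.1.224", "vrf_lite": None}]))
# -> {'failed': True, 'msg': 'Please specify the interface parameter ...'}
print(check_attach_blocks([{"ip_address": "192.168.1.224",
                            "vrf_lite": [{"interface": "Ethernet1/16"}]}]))
# -> {'failed': False}
```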
diff --git a/plugins/module_utils/network/dcnm/dcnm.py b/plugins/module_utils/network/dcnm/dcnm.py
index 73ffcbed0..458199a69 100644
--- a/plugins/module_utils/network/dcnm/dcnm.py
+++ b/plugins/module_utils/network/dcnm/dcnm.py
@@ -42,11 +42,15 @@ def validate_ip_address_format(type, item, invalid_params):
subnet = item.split("/")[1]
if not subnet or int(subnet) > mask_len:
invalid_params.append(
- "{0} : Invalid {1} gw/subnet syntax".format(item, addr_type)
+ "{0} : Invalid {1} gw/subnet syntax".format(
+ item, addr_type
+ )
)
else:
invalid_params.append(
- "{0} : Invalid {1} gw/subnet syntax".format(item, addr_type)
+ "{0} : Invalid {1} gw/subnet syntax".format(
+ item, addr_type
+ )
)
try:
socket.inet_pton(addr_family, address)
@@ -138,7 +142,9 @@ def validate_list_of_dicts(param_list, spec, module=None):
module.no_log_values.add(item)
else:
msg = "\n\n'{0}' is a no_log parameter".format(param)
- msg += "\nAnsible module object must be passed to this "
+ msg += (
+ "\nAnsible module object must be passed to this "
+ )
msg += "\nfunction to ensure it is not logged\n\n"
raise Exception(msg)
@@ -158,6 +164,7 @@ def get_fabric_inventory_details(module, fabric):
conn = Connection(module._socket_path)
if conn.get_version() == 12:
path = "/appcenter/cisco/ndfc/api/v1/lan-fabric" + path
+ path += "/switchesByFabric"
count = 1
while rc is False:
@@ -188,10 +195,13 @@ def get_fabric_inventory_details(module, fabric):
raise Exception(response)
for device_data in response.get("DATA"):
- key = device_data.get("ipAddress")
+
+ if device_data.get("ipAddress", "") != "":
+ key = device_data.get("ipAddress")
+ else:
+ key = device_data.get("logicalName")
inventory_data[key] = device_data
rc = True
-
return inventory_data
@@ -204,7 +214,9 @@ def get_ip_sn_dict(inventory_data):
ip = inventory_data[device_key].get("ipAddress")
sn = inventory_data[device_key].get("serialNumber")
hn = inventory_data[device_key].get("logicalName")
- ip_sn.update({ip: sn})
+
+ if ip != "":
+ ip_sn.update({ip: sn})
hn_sn.update({hn: sn})
return ip_sn, hn_sn
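
The two hunks above change how inventory entries are keyed (prefer `ipAddress`, fall back to `logicalName` when the switch has no IP yet, e.g. still in POAP) and skip the `ip_sn` entry when the IP is empty. A small sketch of that behaviour with made-up records:

```python
# Sketch of the keying fallback and the empty-IP handling above.
# The device records below are invented.
devices = [
    {"ipAddress": "192.168.1.224", "logicalName": "n9kv-test-sw1", "serialNumber": "SN1"},
    {"ipAddress": "", "logicalName": "n9kv-test-sw100", "serialNumber": "SN100"},
]

inventory_data = {}
for device_data in devices:
    if device_data.get("ipAddress", "") != "":
        key = device_data["ipAddress"]
    else:
        key = device_data["logicalName"]
    inventory_data[key] = device_data

ip_sn, hn_sn = {}, {}
for entry in inventory_data.values():
    if entry["ipAddress"] != "":
        ip_sn[entry["ipAddress"]] = entry["serialNumber"]
    hn_sn[entry["logicalName"]] = entry["serialNumber"]

print(sorted(inventory_data))  # ['192.168.1.224', 'n9kv-test-sw100']
print(ip_sn)                   # {'192.168.1.224': 'SN1'}
print(hn_sn)                   # {'n9kv-test-sw1': 'SN1', 'n9kv-test-sw100': 'SN100'}
```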
@@ -247,9 +259,7 @@ def dcnm_get_ip_addr_info(module, sw_elem, ip_sn, hn_sn):
msg_dict = {"Error": ""}
msg = 'Given switch elem = "{}" is not a valid one for this fabric\n'
- msg1 = (
- 'Given switch elem = "{}" cannot be validated, provide a valid ip_sn object\n'
- )
+ msg1 = 'Given switch elem = "{}" cannot be validated, provide a valid ip_sn object\n'
# Check if the given sw_elem is a v4 ip_addr
try:
@@ -377,7 +387,9 @@ def dcnm_reset_connection(module):
conn = Connection(module._socket_path)
conn.logout()
- return conn.login(conn.get_option("remote_user"), conn.get_option("password"))
+ return conn.login(
+ conn.get_option("remote_user"), conn.get_option("password")
+ )
def dcnm_version_supported(module):
@@ -395,7 +407,10 @@ def dcnm_version_supported(module):
supported = None
data = None
- paths = ["/fm/fmrest/about/version", "/appcenter/cisco/ndfc/api/about/version"]
+ paths = [
+ "/fm/fmrest/about/version",
+ "/appcenter/cisco/ndfc/api/about/version",
+ ]
for path in paths:
response = dcnm_send(module, method, path)
if response["RETURN_CODE"] == 200:
@@ -476,7 +491,9 @@ def dcnm_get_url(module, fabric, path, items, module_name):
)
url = path.format(fabric, itemstr)
else:
- itemstr = ",".join(itemlist[iter * (len(itemlist) // send_count):])
+ itemstr = ",".join(
+ itemlist[iter * (len(itemlist) // send_count):]
+ )
url = path.format(fabric, itemstr)
att_objects = dcnm_send(module, method, url)
diff --git a/plugins/modules/dcnm_inventory.py b/plugins/modules/dcnm_inventory.py
index 0550c27a3..d691f9d09 100644
--- a/plugins/modules/dcnm_inventory.py
+++ b/plugins/modules/dcnm_inventory.py
@@ -85,7 +85,8 @@
description:
- Role which needs to be assigned to the switch
choices: ['leaf', 'spine', 'border', 'border_spine', 'border_gateway', 'border_gateway_spine',
- 'super_spine', 'border_super_spine', 'border_gateway_super_spine']
+ 'super_spine', 'border_super_spine', 'border_gateway_super_spine', 'access', 'aggregation',
+ 'edge_router', 'core_router', 'tor']
type: str
required: false
default: leaf
@@ -947,6 +948,11 @@ def validate_input(self):
"super_spine",
"border_super_spine",
"border_gateway_super_spine",
+ "access",
+ "aggregation",
+ "edge_router",
+ "core_router",
+ "tor"
],
default="leaf",
),
@@ -1091,6 +1097,11 @@ def validate_input(self):
"super_spine",
"border_super_spine",
"border_gateway_super_spine",
+ "access",
+ "aggregation",
+ "edge_router",
+ "core_router",
+ "tor",
"None",
],
default="None",
@@ -1316,7 +1327,6 @@ def set_lancred_switch(self, set_lan):
def lancred_all_switches(self):
- want_list = []
# Get Fabric Inventory Details
method = "GET"
path = "/fm/fmrest/lanConfig/getLanSwitchCredentials"
@@ -1356,7 +1366,6 @@ def lancred_all_switches(self):
def assign_role(self):
- want_list = []
method = "GET"
path = "/rest/control/fabrics/{0}/inventory".format(self.fabric)
if self.nd:
@@ -1484,7 +1493,6 @@ def config_save(self):
def config_deploy(self):
# config-deploy
- sernos = []
method = "POST"
path = "/rest/control/fabrics/{0}".format(self.fabric)
if self.nd:
diff --git a/plugins/modules/dcnm_links.py b/plugins/modules/dcnm_links.py
index 41567ab71..885df3f00 100644
--- a/plugins/modules/dcnm_links.py
+++ b/plugins/modules/dcnm_links.py
@@ -776,8 +776,6 @@
dcnm_get_ip_addr_info,
)
-from datetime import datetime
-
# Resource Class object which includes all the required methods and data to configure and maintain Links
class DcnmLinks:
@@ -790,6 +788,7 @@ class DcnmLinks:
"LINKS_GET_BY_FABRIC": "/rest/control/links/fabrics/{}",
"LINKS_CFG_DEPLOY": "/rest/control/fabrics/{}/config-deploy/",
"CONFIG_PREVIEW": "/rest/control/fabrics/{}/config-preview/",
+ "FABRIC_ACCESS_MODE": "/rest/control/fabrics/{}/accessmode",
},
12: {
"LINKS_GET_BY_SWITCH_PAIR": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/links",
@@ -799,6 +798,7 @@ class DcnmLinks:
"LINKS_GET_BY_FABRIC": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/links/fabrics/{}",
"LINKS_CFG_DEPLOY": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-deploy/",
"CONFIG_PREVIEW": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/config-preview/",
+ "FABRIC_ACCESS_MODE": "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{}/accessmode",
},
}
@@ -865,6 +865,8 @@ def __init__(self, module):
self.diff_modify = []
self.diff_delete = []
self.diff_deploy = {}
+ self.monitoring = []
+ self.meta_switches = []
self.fd = None
self.changed_dict = [
{
@@ -901,6 +903,46 @@ def log_msg(self, msg):
self.fd.write("\n")
self.fd.flush()
+ def dcnm_dump_have_db(self):
+
+ lhave = []
+
+ for have in self.have:
+ lhave.append(
+ {
+ "UUID": have["link-uuid"],
+ "SRC FABRIC": have["sw1-info"]["fabric-name"],
+ "SRC IF NAME": have["sw1-info"]["if-name"],
+ "SRC SNO": have["sw1-info"]["sw-serial-number"],
+ "SRC SYS NAME": have["sw1-info"]["sw-sys-name"],
+ "DST FABRIC": have["sw2-info"]["fabric-name"],
+ "DST IF NAME": have["sw2-info"]["if-name"],
+ "DST SNO": have["sw2-info"]["sw-serial-number"],
+ "DST SYS NAME": have["sw2-info"]["sw-sys-name"],
+ }
+ )
+ self.log_msg(f"HAVE = {lhave}\n")
+
+ def dcnm_print_have(self, have):
+
+ lhave = []
+
+ lhave.append(
+ {
+ "UUID": have["link-uuid"],
+ "SRC FABRIC": have["sw1-info"]["fabric-name"],
+ "SRC IF NAME": have["sw1-info"]["if-name"],
+ "SRC SNO": have["sw1-info"]["sw-serial-number"],
+ "SRC SYS NAME": have["sw1-info"]["sw-sys-name"],
+ "DST FABRIC": have["sw2-info"]["fabric-name"],
+ "DST IF NAME": have["sw2-info"]["if-name"],
+ "DST SNO": have["sw2-info"]["sw-serial-number"],
+ "DST SYS NAME": have["sw2-info"]["sw-sys-name"],
+ }
+ )
+
+ self.log_msg(f"have = {lhave}\n")
+
def dcnm_links_compare_ip_addresses(self, addr1, addr2):
"""
@@ -1375,19 +1417,44 @@ def dcnm_links_get_links_payload(self, link):
"destinationFabric": link.get("dst_fabric"),
"sourceInterface": link.get("src_interface"),
"destinationInterface": link.get("dst_interface"),
- "sourceDevice": self.ip_sn[link.get("src_device")],
- "destinationDevice": self.ip_sn[link.get("dst_device")],
}
+ if link.get("src_device") in self.ip_sn:
+ link_payload["sourceDevice"] = self.ip_sn[link.get("src_device")]
+ else:
+ link_payload["sourceDevice"] = self.hn_sn.get(
+ link["src_device"], ""
+ )
+
+ if link.get("dst_device") in self.ip_sn:
+ link_payload["destinationDevice"] = self.ip_sn[
+ link.get("dst_device")
+ ]
+ else:
+ link_payload["destinationDevice"] = self.hn_sn.get(
+ link["dst_device"], ""
+ )
+
+ # At this point link_payload has sourceDevice set to a proper serial number, and
+ # destinationDevice is either set to a proper serial number or "".
+
+ if link_payload["sourceDevice"] in self.sn_hn:
+ link_payload["sourceSwitchName"] = self.sn_hn.get(
+ link_payload["sourceDevice"], "Switch1"
+ )
+ else:
+ link_payload["sourceSwitchName"] = link.get("src_device")
+
+ if link_payload["destinationDevice"] in self.sn_hn:
+ link_payload["destinationSwitchName"] = self.sn_hn.get(
+ link_payload["destinationDevice"], "Switch2"
+ )
+ else:
+ link_payload["destinationSwitchName"] = link.get("dst_device")
+
if self.module.params["state"] == "deleted":
return link_payload
- link_payload["sourceSwitchName"] = self.sn_hn.get(
- link_payload["sourceDevice"], "Switch1"
- )
- link_payload["destinationSwitchName"] = self.sn_hn.get(
- link_payload["destinationDevice"], "Switch2"
- )
link_payload["templateName"] = link.get("template")
# Intra and Inter fabric payloads are different. Build them separately
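
A condensed sketch of the device-resolution order the payload builder now follows: serial number from `ip_sn` (IP key) first, then `hn_sn` (hostname key); switch name from `sn_hn` when the serial is known, otherwise the playbook value is reused. The maps and devices below are invented for illustration:

```python
# Condensed sketch of the resolution order used for the link payload fields above.
ip_sn = {"192.168.1.224": "SN1"}
hn_sn = {"n9kv-test-sw1": "SN1"}
sn_hn = {sn: hn for hn, sn in hn_sn.items() if sn != ""}

def resolve_device(device):
    # Serial number: prefer the IP map, fall back to the hostname map.
    serial = ip_sn[device] if device in ip_sn else hn_sn.get(device, "")
    # Switch name: from sn_hn when the serial is known, else reuse the playbook value.
    name = sn_hn[serial] if serial in sn_hn else device
    return serial, name

print(resolve_device("192.168.1.224"))    # ('SN1', 'n9kv-test-sw1')
print(resolve_device("n9kv-test-sw100"))  # unknown switch -> ('', 'n9kv-test-sw100')
```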
@@ -1918,10 +1985,27 @@ def dcnm_links_update_want(self):
and (cfg["dst_fabric"] == want["destinationFabric"])
and (cfg["src_interface"] == want["sourceInterface"])
and (cfg["dst_interface"] == want["destinationInterface"])
- and (self.ip_sn[cfg["src_device"]] == want["sourceDevice"])
and (
- self.ip_sn[cfg["dst_device"]]
- == want["destinationDevice"]
+ cfg["src_device"] in self.ip_sn
+ and self.ip_sn[cfg["src_device"]]
+ == want["sourceDevice"]
+ )
+ or (
+ cfg["src_device"] in self.hn_sn
+ and self.hn_sn[cfg["src_device"]]
+ == want["sourceDevice"]
+ )
+ and (
+ (
+ cfg["dst_device"] in self.ip_sn
+ and self.ip_sn[cfg["dst_device"]]
+ == want["destinationDevice"]
+ )
+ or (
+ cfg["dst_device"] in self.hn_sn
+ and self.hn_sn[cfg["dst_device"]]
+ == want["destinationDevice"]
+ )
)
and (cfg["template"] == want["templateName"])
)
@@ -1978,14 +2062,22 @@ def dcnm_links_get_links_info_from_dcnm(self, link):
"""
# link object is from self.want. These objets would have translated devices to serial numbers already.
- path = self.paths[
- "LINKS_GET_BY_SWITCH_PAIR"
- ] + "?switch1Sn={0}&switch2Sn={1}".format(
- link["sourceDevice"], link["destinationDevice"]
- )
- path = path + "&switch1IfName={0}&switch2IfName={1}".format(
- link["sourceInterface"], link["destinationInterface"]
- )
+
+ if (
+ link["sourceDevice"] in self.ip_sn.values()
+ and link["destinationDevice"] in self.ip_sn.values()
+ ):
+ path = self.paths[
+ "LINKS_GET_BY_SWITCH_PAIR"
+ ] + "?switch1Sn={0}&switch2Sn={1}".format(
+ link["sourceDevice"], link["destinationDevice"]
+ )
+ path = path + "&switch1IfName={0}&switch2IfName={1}".format(
+ link["sourceInterface"], link["destinationInterface"]
+ )
+ else:
+ # If the devices are not manageable, the path should not include them
+ path = self.paths["LINKS_GET_BY_SWITCH_PAIR"]
resp = dcnm_send(self.module, "GET", path)
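
The conditional query-path construction above can be pictured as below; the base path and serial numbers are placeholders, not real controller values:

```python
# Sketch of the conditional query-path construction above: only include the
# switch serials and interfaces when both devices are manageable.
LINKS_GET_BY_SWITCH_PAIR = "/placeholder/rest/control/links"

def build_query_path(link, managed_serials):
    if (link["sourceDevice"] in managed_serials
            and link["destinationDevice"] in managed_serials):
        path = LINKS_GET_BY_SWITCH_PAIR + "?switch1Sn={0}&switch2Sn={1}".format(
            link["sourceDevice"], link["destinationDevice"]
        )
        path += "&switch1IfName={0}&switch2IfName={1}".format(
            link["sourceInterface"], link["destinationInterface"]
        )
    else:
        # One of the devices is not manageable; query without switch filters
        # and match against the response instead.
        path = LINKS_GET_BY_SWITCH_PAIR
    return path

link = {"sourceDevice": "SN1", "destinationDevice": "SN2",
        "sourceInterface": "Ethernet1/3", "destinationInterface": "Ethernet1/3"}
print(build_query_path(link, {"SN1", "SN2"}))
print(build_query_path(link, {"SN1"}))
```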
@@ -2015,6 +2107,31 @@ def dcnm_links_get_links_info_from_dcnm(self, link):
link_elem["sw2-info"]["fabric-name"]
== link["destinationFabric"]
)
+ and (
+ link["sourceDevice"]
+ == link_elem["sw1-info"]["sw-serial-number"]
+ )
+ and (
+ (
+ link["destinationDevice"] in self.ip_sn.values()
+ and link["destinationDevice"]
+ == link_elem["sw2-info"]["sw-serial-number"]
+ )
+ or (
+ link["destinationSwitchName"]
+ + "-"
+ + link["destinationFabric"]
+ == link_elem["sw2-info"]["sw-serial-number"]
+ )
+ )
+ and (
+ link["sourceInterface"]
+ == link_elem["sw1-info"]["if-name"]
+ )
+ and (
+ link["destinationInterface"]
+ == link_elem["sw2-info"]["if-name"]
+ )
)
]
@@ -2747,12 +2864,15 @@ def dcnm_links_compare_Links(self, want):
)
and (
have["sw2-info"]["sw-serial-number"]
- == want["destinationDevice"]
+ == want["destinationDevice"] or
+ have["sw2-info"]["sw-serial-number"]
+ == want["destinationSwitchName"] + "-" + want["destinationFabric"]
)
)
]
for mlink in match_have:
+
if want["sourceFabric"] == want["destinationFabric"]:
return self.dcnm_links_compare_intra_fabric_link_params(
want, mlink
@@ -2816,7 +2936,7 @@ def dcnm_links_update_diff_deploy(self, fabric, device):
if self.diff_deploy.get(fabric, "") == "":
self.diff_deploy[fabric] = []
- if device not in self.diff_deploy[fabric]:
+ if device != "" and device not in self.diff_deploy[fabric]:
self.diff_deploy[fabric].append(device)
def dcnm_links_get_diff_merge(self):
@@ -2867,12 +2987,37 @@ def dcnm_links_get_diff_merge(self):
# If "deploy" flag is set to "true", then all pending configurations on the source and
# destination devices will be deployed.
if self.deploy:
- self.dcnm_links_update_diff_deploy(
- self.fabric, link["sourceDevice"]
- )
- self.dcnm_links_update_diff_deploy(
- link["destinationFabric"], link["destinationDevice"]
- )
+
+ if (
+ self.fabric not in self.monitoring
+ and link["sourceDevice"] in self.managable.values()
+ ):
+ self.dcnm_links_update_diff_deploy(
+ self.fabric, link["sourceDevice"]
+ )
+ else:
+ self.dcnm_links_update_diff_deploy(self.fabric, "")
+
+ # If the source switches are not manageable, do not deploy anything on the destination fabric to
+ # avoid inconsistencies.
+ if link["sourceDevice"] in self.managable.values():
+ if (
+ link["destinationFabric"] not in self.monitoring
+ and link["destinationDevice"]
+ in self.managable.values()
+ ):
+ self.dcnm_links_update_diff_deploy(
+ link["destinationFabric"],
+ link["destinationDevice"],
+ )
+ else:
+ self.dcnm_links_update_diff_deploy(
+ link["destinationFabric"], ""
+ )
+ else:
+ self.dcnm_links_update_diff_deploy(
+ link["destinationFabric"], ""
+ )
if self.diff_deploy != {}:
self.changed_dict[0]["deploy"].append(
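
A compact sketch of the deploy gating introduced above: a device is queued for deploy only when its fabric is not in monitoring mode and the device itself is manageable, and empty device strings are ignored. Names mirror the module's attributes; the data is toy data:

```python
# Sketch of the deploy gating above. Data below is illustrative only.
monitoring = {"ext-mon-fabric"}
managable = {"192.168.1.224": "SN1", "n9kv-test-sw1": "SN1"}
diff_deploy = {}

def update_diff_deploy(fabric, device):
    diff_deploy.setdefault(fabric, [])
    if device != "" and device not in diff_deploy[fabric]:
        diff_deploy[fabric].append(device)

def queue_deploy(fabric, serial):
    if fabric not in monitoring and serial in managable.values():
        update_diff_deploy(fabric, serial)
    else:
        # Record the fabric with no devices; downstream deploy/sync steps skip empty lists.
        update_diff_deploy(fabric, "")

queue_deploy("num-fabric", "SN1")         # deployable
queue_deploy("ext-mon-fabric", "SN1")     # fabric is in monitoring mode -> nothing queued
queue_deploy("num-fabric", "SN-UNKNOWN")  # switch not manageable -> nothing queued
print(diff_deploy)  # {'num-fabric': ['SN1'], 'ext-mon-fabric': []}
```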
@@ -2893,6 +3038,7 @@ def dcnm_links_get_diff_deleted(self):
None
"""
+ match_links = []
for link in self.links_info:
match_links = [
@@ -2904,12 +3050,18 @@ def dcnm_links_get_diff_deleted(self):
and (have["sw1-info"]["if-name"] == link["src_interface"])
and (have["sw2-info"]["if-name"] == link["dst_interface"])
and (
- have["sw1-info"]["sw-serial-number"]
- == self.ip_sn[link["src_device"]]
+ link["src_device"] in self.ip_sn
+ and have["sw1-info"]["sw-serial-number"]
+ == self.ip_sn.get(link["src_device"], "")
+ or have["sw1-info"]["sw-serial-number"]
+ == self.hn_sn.get(link["src_device"], "")
)
and (
- have["sw2-info"]["sw-serial-number"]
- == self.ip_sn[link["dst_device"]]
+ link["dst_device"] in self.ip_sn
+ and have["sw2-info"]["sw-serial-number"]
+ == self.ip_sn.get(link["dst_device"], "")
+ or have["sw2-info"]["sw-serial-number"]
+ == self.hn_sn.get(link["dst_device"], "")
)
)
]
@@ -2928,12 +3080,28 @@ def dcnm_links_get_diff_deleted(self):
}
)
- self.dcnm_links_update_diff_deploy(
- self.fabric, self.ip_sn[link["src_device"]]
- )
- self.dcnm_links_update_diff_deploy(
- link["dst_fabric"], self.ip_sn[link["dst_device"]]
- )
+ if self.deploy:
+ if (
+ self.fabric not in self.monitoring
+ and link["src_device"] in self.managable
+ ):
+ self.dcnm_links_update_diff_deploy(
+ self.fabric, self.ip_sn[link["src_device"]]
+ )
+ else:
+ self.dcnm_links_update_diff_deploy(self.fabric, "")
+
+ if (
+ link["dst_fabric"] not in self.monitoring
+ and link["dst_device"] in self.managable
+ ):
+ self.dcnm_links_update_diff_deploy(
+ link["dst_fabric"], self.ip_sn[link["dst_device"]]
+ )
+ else:
+ self.dcnm_links_update_diff_deploy(
+ link["dst_fabric"], ""
+ )
if self.diff_deploy != {}:
self.changed_dict[0]["deploy"].append(
@@ -2998,16 +3166,28 @@ def dcnm_links_get_diff_query(self):
and (
(link["src_device"] == "")
or (
- rlink["sw1-info"]["sw-serial-number"]
+ link["src_device"] in self.ip_sn
+ and rlink["sw1-info"]["sw-serial-number"]
== self.ip_sn[link["src_device"]]
)
+ or (
+ link["src_device"] in self.hn_sn
+ and rlink["sw1-info"]["sw-serial-number"]
+ == self.hn_sn[link["src_device"]]
+ )
)
and (
(link["dst_device"] == "")
or (
- rlink["sw2-info"]["sw-serial-number"]
+ link["dst_device"] in self.ip_sn
+ and rlink["sw2-info"]["sw-serial-number"]
== self.ip_sn[link["dst_device"]]
)
+ or (
+ link["dst_device"] in self.hn_sn
+ and rlink["sw2-info"]["sw-serial-number"]
+ == self.hn_sn[link["dst_device"]]
+ )
)
and (
(link["template"] == "")
@@ -3028,7 +3208,7 @@ def dcnm_links_deploy_to_switches(self):
resp = {}
resp["RETURN_CODE"] = 200
- for fabric in self.diff_deploy.keys():
+ for fabric in self.diff_deploy:
if self.diff_deploy[fabric] != []:
deploy_path = self.paths["LINKS_CFG_DEPLOY"].format(fabric)
@@ -3048,7 +3228,7 @@ def dcnm_links_get_switch_sync_status(self):
retry = False
- for fabric in self.diff_deploy.keys():
+ for fabric in self.diff_deploy:
if self.diff_deploy[fabric] != []:
@@ -3198,7 +3378,6 @@ def dcnm_links_update_inventory_data(self):
processed_fabrics.append(self.fabric)
for cfg in self.config:
-
# For every fabric included in the playbook, get the inventory details. This info is required
# to get ip_sn, hn_sn and sn_hn details
if cfg.get("dst_fabric", "") != "":
@@ -3209,9 +3388,51 @@ def dcnm_links_update_inventory_data(self):
)
self.inventory_data.update(inv_data)
+ # Get all switches which are manageable. Deploy must be avoided for switches that are not in this list
+ managable_ip = [
+ (key, self.inventory_data[key]["serialNumber"])
+ for key in self.inventory_data
+ if str(self.inventory_data[key]["managable"]).lower() == "true"
+ ]
+ managable_hosts = [
+ (
+ self.inventory_data[key]["logicalName"],
+ self.inventory_data[key]["serialNumber"],
+ )
+ for key in self.inventory_data
+ if str(self.inventory_data[key]["managable"]).lower() == "true"
+ ]
+ self.managable = dict(managable_ip + managable_hosts)
+
+ self.meta_switches = [
+ self.inventory_data[key]["logicalName"]
+ for key in self.inventory_data
+ if self.inventory_data[key]["switchRoleEnum"] is None
+ ]
+
+ # Get all fabrics which are in monitoring mode. Deploy must be avoided for fabrics in this list
+ for fabric in processed_fabrics:
+ path = self.paths["FABRIC_ACCESS_MODE"].format(fabric)
+ resp = dcnm_send(self.module, "GET", path)
+
+ if resp and resp["RETURN_CODE"] == 200:
+ if str(resp["DATA"]["readonly"]).lower() == "true":
+ self.monitoring.append(fabric)
+
+ # Check if the source fabric is in monitoring mode. If so, return an error, since fabrics in monitoring mode do not allow
+ # create/modify/delete and deploy operations.
+ if self.fabric in self.monitoring:
+ self.module.fail_json(
+ msg="Error: Source Fabric '{0}' is in Monitoring mode, No changes are allowed on the fabric\n".format(
+ self.fabric
+ )
+ )
+
# Based on the updated inventory_data, update ip_sn, hn_sn and sn_hn objects
self.ip_sn, self.hn_sn = get_ip_sn_dict(self.inventory_data)
- self.sn_hn = dict([(value, key) for key, value in self.hn_sn.items()])
+ self.sn_hn = dict(
+ [(value, key) for key, value in self.hn_sn.items() if value != ""]
+ )
def dcnm_links_translate_playbook_info(self, config, ip_sn, hn_sn):
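
A reduced sketch of the monitoring-mode discovery added above; `fetch_access_mode` stands in for the `dcnm_send()` call against the per-fabric accessmode endpoint, and the canned responses are invented:

```python
# Reduced sketch of the monitoring-mode discovery above.
def fetch_access_mode(fabric):
    canned = {
        "num-fabric": {"RETURN_CODE": 200, "DATA": {"readonly": False}},
        "ext-mon-fabric": {"RETURN_CODE": 200, "DATA": {"readonly": True}},
    }
    return canned.get(fabric)

def find_monitored_fabrics(fabrics):
    monitoring = []
    for fabric in fabrics:
        resp = fetch_access_mode(fabric)
        if resp and resp["RETURN_CODE"] == 200:
            if str(resp["DATA"]["readonly"]).lower() == "true":
                monitoring.append(fabric)
    return monitoring

monitoring = find_monitored_fabrics(["num-fabric", "ext-mon-fabric"])
print(monitoring)  # ['ext-mon-fabric']
if "ext-mon-fabric" in monitoring:
    # The module fails when the *source* fabric is monitored, since no
    # create/modify/delete or deploy operations are allowed on it.
    print("Error: Source Fabric 'ext-mon-fabric' is in Monitoring mode")
```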
@@ -3236,13 +3457,22 @@ def dcnm_links_translate_playbook_info(self, config, ip_sn, hn_sn):
for cfg in config:
if cfg.get("src_device", "") != "":
- cfg["src_device"] = dcnm_get_ip_addr_info(
- self.module, cfg["src_device"], ip_sn, hn_sn
- )
+ if (
+ cfg["src_device"] in self.ip_sn
+ or cfg["src_device"] in self.hn_sn
+ ):
+ cfg["src_device"] = dcnm_get_ip_addr_info(
+ self.module, cfg["src_device"], ip_sn, hn_sn
+ )
if cfg.get("dst_device", "") != "":
- cfg["dst_device"] = dcnm_get_ip_addr_info(
- self.module, cfg["dst_device"], ip_sn, hn_sn
- )
+ if (
+ cfg["dst_device"] in self.ip_sn
+ or cfg["dst_device"] in self.hn_sn
+ and cfg["dst_device"] not in self.meta_switches
+ ):
+ cfg["dst_device"] = dcnm_get_ip_addr_info(
+ self.module, cfg["dst_device"], ip_sn, hn_sn
+ )
if cfg.get("template", None) is not None:
cfg["template"] = self.templates.get(
@@ -3282,7 +3512,6 @@ def main():
)
dcnm_links.dcnm_links_update_inventory_data()
-
dcnm_links.dcnm_links_translate_playbook_info(
dcnm_links.config, dcnm_links.ip_sn, dcnm_links.hn_sn
)
@@ -3312,6 +3541,12 @@ def main():
dcnm_links.dcnm_links_get_diff_query()
dcnm_links.result["diff"] = dcnm_links.changed_dict
+ dcnm_links.changed_dict[0]["debugs"].append(
+ {"Managable": dcnm_links.managable}
+ )
+ dcnm_links.changed_dict[0]["debugs"].append(
+ {"Monitoring": dcnm_links.monitoring}
+ )
if dcnm_links.diff_create or dcnm_links.diff_delete:
dcnm_links.result["changed"] = True
diff --git a/plugins/modules/dcnm_network.py b/plugins/modules/dcnm_network.py
index 4dd00b7a8..6c33c50aa 100644
--- a/plugins/modules/dcnm_network.py
+++ b/plugins/modules/dcnm_network.py
@@ -470,7 +470,6 @@
dcnm_version_supported,
dcnm_get_url,
)
-from ansible.module_utils.connection import Connection
from ansible.module_utils.basic import AnsibleModule
diff --git a/plugins/modules/dcnm_resource_manager.py b/plugins/modules/dcnm_resource_manager.py
index f953d1a7f..5cb774540 100644
--- a/plugins/modules/dcnm_resource_manager.py
+++ b/plugins/modules/dcnm_resource_manager.py
@@ -278,7 +278,6 @@
"""
-import time
import json
import copy
import ipaddress
diff --git a/plugins/modules/dcnm_rest.py b/plugins/modules/dcnm_rest.py
index 9ef0a0a51..84eda8ed1 100644
--- a/plugins/modules/dcnm_rest.py
+++ b/plugins/modules/dcnm_rest.py
@@ -90,7 +90,6 @@
import json
from json.decoder import JSONDecodeError
-from ansible.module_utils.connection import Connection
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.dcnm.plugins.module_utils.network.dcnm.dcnm import (
dcnm_send,
diff --git a/plugins/modules/dcnm_vrf.py b/plugins/modules/dcnm_vrf.py
index cf92a2a05..3198a593c 100644
--- a/plugins/modules/dcnm_vrf.py
+++ b/plugins/modules/dcnm_vrf.py
@@ -309,12 +309,12 @@
description:
- VRF Name to which this extension is attached
type: str
- required: true
+ required: false
interface:
description:
- Interface of the switch which is connected to the edge router
type: str
- required: false
+ required: true
ipv4_addr:
description:
- IP address of the interface which is connected to the edge router
@@ -448,15 +448,20 @@
- ip_address: 192.168.1.224
- ip_address: 192.168.1.225
vrf_lite:
- # All parameters under vrf_lite except peer_vrf are optional and
- # will be supplied by DCNM when omitted in the playbook
- - peer_vrf: test_vrf_1 # peer_vrf is mandatory
- interface: Ethernet1/16 # optional
+ - peer_vrf: test_vrf_1 # optional
+ interface: Ethernet1/16 # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
neighbor_ipv6: 2010::10:34:0:3 # optional
dot1q: 2 # dot1q can be got from dcnm/optional
+ - peer_vrf: test_vrf_2 # optional
+ interface: Ethernet1/17 # mandatory
+ ipv4_addr: 20.33.0.2/30 # optional
+ neighbor_ipv4: 20.33.0.1 # optional
+ ipv6_addr: 3010::10:34:0:7/64 # optional
+ neighbor_ipv6: 3010::10:34:0:3 # optional
+ dot1q: 3 # dot1q can be got from dcnm/optional
# The two VRFs below will be replaced in the target fabric.
- name: Replace vrfs
@@ -597,9 +602,6 @@ def __init__(self, module):
self.fabric = module.params["fabric"]
self.config = copy.deepcopy(module.params.get("config"))
self.check_mode = False
- self.vrf_ext = False
- self.role = ""
- self.serial = ""
self.have_create = []
self.want_create = []
self.diff_create = []
@@ -624,7 +626,6 @@ def __init__(self, module):
self.diff_deploy = {}
self.diff_undeploy = {}
self.diff_delete = {}
- self.vrflitevalues = {}
self.diff_input_format = []
self.query = []
self.dcnm_version = dcnm_version_supported(self.module)
@@ -643,7 +644,7 @@ def __init__(self, module):
self.failed_to_rollback = False
self.WAIT_TIME_FOR_DELETE_LOOP = 5 # in seconds
- def diff_for_attach_deploy(self, want_a, have_a):
+ def diff_for_attach_deploy(self, want_a, have_a, replace=False):
attach_list = []
@@ -653,6 +654,7 @@ def diff_for_attach_deploy(self, want_a, have_a):
dep_vrf = False
for want in want_a:
found = False
+ interface_match = False
if have_a:
for have in have_a:
if want["serialNumber"] == have["serialNumber"]:
@@ -668,51 +670,89 @@ def diff_for_attach_deploy(self, want_a, have_a):
want_e = ast.literal_eval(want_ext_values["VRF_LITE_CONN"])
have_e = ast.literal_eval(have_ext_values["VRF_LITE_CONN"])
- if (
- want_e["VRF_LITE_CONN"][0]["IF_NAME"]
- == have_e["VRF_LITE_CONN"][0]["IF_NAME"]
- ):
- if (
- want_e["VRF_LITE_CONN"][0]["DOT1Q_ID"]
- == have_e["VRF_LITE_CONN"][0]["DOT1Q_ID"]
- ):
- if (
- want_e["VRF_LITE_CONN"][0]["IP_MASK"]
- == have_e["VRF_LITE_CONN"][0]["IP_MASK"]
- ):
- if (
- want_e["VRF_LITE_CONN"][0]["NEIGHBOR_IP"]
- == have_e["VRF_LITE_CONN"][0]["NEIGHBOR_IP"]
- ):
+ if replace and (len(want_e["VRF_LITE_CONN"]) != len(have_e["VRF_LITE_CONN"])):
+ # For replace/override states, if the number of want and have vrf_lite attachments on a switch
+ # differs, the want must be pushed to NDFC. No further check is required for
+ # this switch
+ break
+
+ for wlite in want_e["VRF_LITE_CONN"]:
+ for hlite in have_e["VRF_LITE_CONN"]:
+ found = False
+ interface_match = False
+ if wlite["IF_NAME"] == hlite["IF_NAME"]:
+ found = True
+ interface_match = True
+ if wlite["DOT1Q_ID"]:
if (
- want_e["VRF_LITE_CONN"][0]["IPV6_MASK"]
- == have_e["VRF_LITE_CONN"][0][
- "IPV6_MASK"
- ]
+ wlite["DOT1Q_ID"]
+ != hlite["DOT1Q_ID"]
):
- if (
- want_e["VRF_LITE_CONN"][0][
- "IPV6_NEIGHBOR"
- ]
- == have_e["VRF_LITE_CONN"][0][
- "IPV6_NEIGHBOR"
- ]
- ):
- if (
- want_e["VRF_LITE_CONN"][0][
- "PEER_VRF_NAME"
- ]
- == have_e["VRF_LITE_CONN"][0][
- "PEER_VRF_NAME"
- ]
- ):
- found = True
+ found = False
+ break
+
+ if wlite["IP_MASK"]:
+ if (
+ wlite["IP_MASK"]
+ != hlite["IP_MASK"]
+ ):
+ found = False
+ break
+
+ if wlite["NEIGHBOR_IP"]:
+ if (
+ wlite["NEIGHBOR_IP"]
+ != hlite["NEIGHBOR_IP"]
+ ):
+ found = False
+ break
+
+ if wlite["IPV6_MASK"]:
+ if (
+ wlite["IPV6_MASK"]
+ != hlite["IPV6_MASK"]
+ ):
+ found = False
+ break
+
+ if wlite["IPV6_NEIGHBOR"]:
+ if (
+ wlite["IPV6_NEIGHBOR"]
+ != hlite["IPV6_NEIGHBOR"]
+ ):
+ found = False
+ break
+
+ if wlite["PEER_VRF_NAME"]:
+ if (
+ wlite["PEER_VRF_NAME"]
+ != hlite["PEER_VRF_NAME"]
+ ):
+ found = False
+ break
+
+ if found:
+ break
+
+ if interface_match and not found:
+ break
+
+ if interface_match and not found:
+ break
elif (
want["extensionValues"] != ""
- or have["extensionValues"] != ""
+ and have["extensionValues"] == ""
):
found = False
+ elif (
+ want["extensionValues"] == ""
+ and have["extensionValues"] != ""
+ ):
+ if replace:
+ found = False
+ else:
+ found = True
else:
found = True
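
The rewritten comparison walks every want/have VRF LITE entry instead of only index 0. A boiled-down sketch of the matching rule (pair entries on IF_NAME, then compare only the fields the playbook actually supplied; sample entries are illustrative):

```python
# Boiled-down sketch of the per-entry VRF LITE comparison above.
OPTIONAL_FIELDS = ("DOT1Q_ID", "IP_MASK", "NEIGHBOR_IP",
                   "IPV6_MASK", "IPV6_NEIGHBOR", "PEER_VRF_NAME")

def lite_entry_matches(wlite, hlite):
    # Only fields the playbook actually set (non-empty in want) are compared.
    for field in OPTIONAL_FIELDS:
        if wlite.get(field) and wlite[field] != hlite.get(field):
            return False
    return True

def lite_lists_match(want_conns, have_conns, replace=False):
    if replace and len(want_conns) != len(have_conns):
        # For replaced/overridden state a length mismatch alone forces a push.
        return False
    for wlite in want_conns:
        hlite = next((h for h in have_conns if h["IF_NAME"] == wlite["IF_NAME"]), None)
        if hlite is None or not lite_entry_matches(wlite, hlite):
            return False
    return True

want = [{"IF_NAME": "Ethernet1/16", "DOT1Q_ID": "2", "IP_MASK": "", "NEIGHBOR_IP": "",
         "IPV6_MASK": "", "IPV6_NEIGHBOR": "", "PEER_VRF_NAME": ""}]
have = [{"IF_NAME": "Ethernet1/16", "DOT1Q_ID": "2", "IP_MASK": "10.33.0.2/30"}]
print(lite_lists_match(want, have))                # True: unset want fields are ignored
print(lite_lists_match(want, have, replace=True))  # True: same number of entries
```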
@@ -729,6 +769,12 @@ def diff_for_attach_deploy(self, want_a, have_a):
if bool(have["deployment"]) is not bool(want["deployment"]):
dep_vrf = True
+ if found:
+ break
+
+ if interface_match and not found:
+ break
+
if not found:
if bool(want["isAttached"]):
del want["isAttached"]
@@ -738,6 +784,8 @@ def diff_for_attach_deploy(self, want_a, have_a):
def update_attach_params(self, attach, vrf_name, deploy, vlanId):
+ vrf_ext = False
+
if not attach:
return {}
@@ -748,7 +796,6 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId):
for ip, ser in self.ip_sn.items():
if ip == attach["ip_address"]:
serial = ser
- self.serial = ser
if not serial:
self.module.fail_json(
@@ -758,7 +805,6 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId):
)
role = self.inventory_data[attach["ip_address"]].get("switchRole")
- self.role = role
if role.lower() == "spine" or role.lower() == "super spine":
msg = "VRFs cannot be attached to switch {0} with role {1}".format(
@@ -767,6 +813,11 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId):
self.module.fail_json(msg=msg)
ext_values = {}
+ ext_values["VRF_LITE_CONN"] = []
+ ms_con = {}
+ ms_con["MULTISITE_CONN"] = []
+ ext_values["MULTISITE_CONN"] = json.dumps(ms_con)
+
if attach["vrf_lite"]:
"""Before apply the vrf_lite config, need double check if the swtich role is started wth Border"""
r = re.search(r"\bborder\b", role.lower())
@@ -780,43 +831,69 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId):
for a_l in at_lite:
if (
a_l["interface"]
- and a_l["dot1q"]
- and a_l["ipv4_addr"]
- and a_l["neighbor_ipv4"]
- and a_l["ipv6_addr"]
- and a_l["neighbor_ipv6"]
- and a_l["peer_vrf"]
+ or a_l["dot1q"]
+ or a_l["ipv4_addr"]
+ or a_l["neighbor_ipv4"]
+ or a_l["ipv6_addr"]
+ or a_l["neighbor_ipv6"]
+ or a_l["peer_vrf"]
):
- """if all the elements are provided by the user in the playbook fill the extension values"""
+ """if vrf lite elements are provided by the user in the playbook fill the extension values"""
vrflite_con = {}
vrflite_con["VRF_LITE_CONN"] = []
vrflite_con["VRF_LITE_CONN"].append({})
- vrflite_con["VRF_LITE_CONN"][0]["IF_NAME"] = a_l["interface"]
- vrflite_con["VRF_LITE_CONN"][0]["DOT1Q_ID"] = str(a_l["dot1q"])
- vrflite_con["VRF_LITE_CONN"][0]["IP_MASK"] = a_l["ipv4_addr"]
- vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] = a_l[
- "neighbor_ipv4"
- ]
- vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_ASN"] = "65535"
- vrflite_con["VRF_LITE_CONN"][0]["IPV6_MASK"] = a_l["ipv6_addr"]
- vrflite_con["VRF_LITE_CONN"][0]["IPV6_NEIGHBOR"] = a_l[
- "neighbor_ipv6"
- ]
- vrflite_con["VRF_LITE_CONN"][0]["AUTO_VRF_LITE_FLAG"] = "false"
- vrflite_con["VRF_LITE_CONN"][0]["PEER_VRF_NAME"] = a_l["peer_vrf"]
+ if a_l["interface"]:
+ vrflite_con["VRF_LITE_CONN"][0]["IF_NAME"] = a_l["interface"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["IF_NAME"] = ""
+
+ if a_l["dot1q"]:
+ vrflite_con["VRF_LITE_CONN"][0]["DOT1Q_ID"] = str(a_l["dot1q"])
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["DOT1Q_ID"] = ""
+
+ if a_l["ipv4_addr"]:
+ vrflite_con["VRF_LITE_CONN"][0]["IP_MASK"] = a_l["ipv4_addr"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["IP_MASK"] = ""
+
+ if a_l["neighbor_ipv4"]:
+ vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] = a_l[
+ "neighbor_ipv4"
+ ]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["NEIGHBOR_IP"] = ""
+
+ if a_l["ipv6_addr"]:
+ vrflite_con["VRF_LITE_CONN"][0]["IPV6_MASK"] = a_l["ipv6_addr"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["IPV6_MASK"] = ""
+
+ if a_l["neighbor_ipv6"]:
+ vrflite_con["VRF_LITE_CONN"][0]["IPV6_NEIGHBOR"] = a_l[
+ "neighbor_ipv6"
+ ]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["IPV6_NEIGHBOR"] = ""
+
+ if a_l["peer_vrf"]:
+ vrflite_con["VRF_LITE_CONN"][0]["PEER_VRF_NAME"] = a_l["peer_vrf"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0]["PEER_VRF_NAME"] = ""
+
vrflite_con["VRF_LITE_CONN"][0][
"VRF_LITE_JYTHON_TEMPLATE"
] = "Ext_VRF_Lite_Jython"
- ext_values["VRF_LITE_CONN"] = json.dumps(vrflite_con)
+ if (ext_values["VRF_LITE_CONN"]):
+ ext_values["VRF_LITE_CONN"]["VRF_LITE_CONN"].extend(vrflite_con["VRF_LITE_CONN"])
+ else:
+ ext_values["VRF_LITE_CONN"] = vrflite_con
- ms_con = {}
- ms_con["MULTISITE_CONN"] = []
- ext_values["MULTISITE_CONN"] = json.dumps(ms_con)
+ ext_values["VRF_LITE_CONN"] = json.dumps(ext_values["VRF_LITE_CONN"])
- self.vrflitevalues = ext_values
- self.vrf_ext = True
+ vrf_ext = True
attach.update({"fabric": self.fabric})
attach.update({"vrfName": vrf_name})
@@ -824,14 +901,13 @@ def update_attach_params(self, attach, vrf_name, deploy, vlanId):
attach.update({"deployment": deploy})
attach.update({"isAttached": True})
attach.update({"serialNumber": serial})
- if self.vrf_ext:
+ if vrf_ext:
attach.update({"extensionValues": json.dumps(ext_values).replace(" ", "")})
attach.update(
{
"instanceValues": '{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}'
}
)
- del attach["vrf_lite"]
else:
attach.update({"extensionValues": ""})
attach.update({"instanceValues": ""})
@@ -1247,8 +1323,10 @@ def get_have(self):
ext_values = ast.literal_eval(
ext_values["VRF_LITE_CONN"]
)
- for ev in ext_values["VRF_LITE_CONN"]:
- extension_values = {}
+ extension_values = {}
+ extension_values["VRF_LITE_CONN"] = []
+
+ for ev in ext_values.get("VRF_LITE_CONN"):
vrflite_con = {}
vrflite_con["VRF_LITE_CONN"] = []
@@ -1277,24 +1355,30 @@ def get_have(self):
] = "false"
vrflite_con["VRF_LITE_CONN"][0][
"PEER_VRF_NAME"
- ] = attach["vrfName"]
+ ] = ev["PEER_VRF_NAME"]
vrflite_con["VRF_LITE_CONN"][0][
"VRF_LITE_JYTHON_TEMPLATE"
] = "Ext_VRF_Lite_Jython"
- extension_values["VRF_LITE_CONN"] = json.dumps(
- vrflite_con
- )
- ms_con = {}
- ms_con["MULTISITE_CONN"] = []
- extension_values["MULTISITE_CONN"] = json.dumps(
- ms_con
- )
- e_values = json.dumps(extension_values).replace(
- " ", ""
- )
+ if (extension_values["VRF_LITE_CONN"]):
+ extension_values["VRF_LITE_CONN"]["VRF_LITE_CONN"].extend(vrflite_con["VRF_LITE_CONN"])
+ else:
+ extension_values["VRF_LITE_CONN"] = vrflite_con
+
+ extension_values["VRF_LITE_CONN"] = json.dumps(
+ extension_values["VRF_LITE_CONN"]
+ )
+
+ ms_con = {}
+ ms_con["MULTISITE_CONN"] = []
+ extension_values["MULTISITE_CONN"] = json.dumps(
+ ms_con
+ )
+ e_values = json.dumps(extension_values).replace(
+ " ", ""
+ )
- attach.update({"extensionValues": e_values})
+ attach.update({"extensionValues": e_values})
if dep_vrf:
upd_vrfs += dep_vrf + ","
@@ -1476,7 +1560,7 @@ def get_diff_replace(self):
all_vrfs = ""
- self.get_diff_merge()
+ self.get_diff_merge(replace=True)
diff_create = self.diff_create
diff_attach = self.diff_attach
diff_deploy = self.diff_deploy
@@ -1557,7 +1641,7 @@ def get_diff_replace(self):
self.diff_attach = diff_attach
self.diff_deploy = diff_deploy
- def get_diff_merge(self):
+ def get_diff_merge(self, replace=False):
# Special cases:
# 1. Auto generate vrfId if its not mentioned by user:
@@ -1709,7 +1793,7 @@ def get_diff_merge(self):
if want_a["vrfName"] == have_a["vrfName"]:
attach_found = True
diff, vrf = self.diff_for_attach_deploy(
- want_a["lanAttachList"], have_a["lanAttachList"]
+ want_a["lanAttachList"], have_a["lanAttachList"], replace
)
if diff:
base = want_a.copy()
@@ -2170,18 +2254,21 @@ def push_to_remote(self, is_rollback=False):
for v_a in d_a["lanAttachList"]:
v_a.update(vlan=0)
if v_a.get("vrf_lite"):
- """Before apply the vrf_lite config, need double check if the switch role is started wth Border"""
- r = re.search(r"\bborder\b", self.role.lower())
- if not r:
- msg = "VRF LITE cannot be attached to switch {0} with role {1}".format(
- v_a["ip_address"], self.role
- )
- self.module.fail_json(msg=msg)
+ for ip, ser in self.ip_sn.items():
+ if ser == v_a["serialNumber"]:
+ """Before apply the vrf_lite config, need double check if the switch role is started wth Border"""
+ role = self.inventory_data[ip].get("switchRole")
+ r = re.search(r"\bborder\b", role.lower())
+ if not r:
+ msg = "VRF LITE cannot be attached to switch {0} with role {1}".format(
+ ip, role
+ )
+ self.module.fail_json(msg=msg)
"""Get the IP/Interface that is connected to edge router can be get from below query"""
method = "GET"
path = self.paths["GET_VRF_SWITCH"].format(
- self.fabric, self.diff_attach[0]["vrfName"], self.serial
+ self.fabric, v_a["vrfName"], v_a["serialNumber"]
)
lite_objects = dcnm_send(self.module, method, path)
@@ -2193,105 +2280,120 @@ def push_to_remote(self, is_rollback=False):
"extensionPrototypeValues"
]
ext_values = None
+ extension_values = {}
+ extension_values["VRF_LITE_CONN"] = []
+ extension_values["MULTISITE_CONN"] = []
+
for ext_l in lite:
if str(ext_l.get("extensionType")) == "VRF_LITE":
ext_values = ext_l["extensionValues"]
ext_values = ast.literal_eval(ext_values)
- extension_values = {}
- for ad_l in v_a["vrf_lite"]:
- vrflite_con = {}
- vrflite_con["VRF_LITE_CONN"] = []
- vrflite_con["VRF_LITE_CONN"].append({})
- if ad_l["interface"]:
+ for ad_l in v_a.get("vrf_lite"):
+ if ad_l["interface"] == ext_values["IF_NAME"]:
+ vrflite_con = {}
+ vrflite_con["VRF_LITE_CONN"] = []
+ vrflite_con["VRF_LITE_CONN"].append({})
vrflite_con["VRF_LITE_CONN"][0][
"IF_NAME"
] = ad_l["interface"]
- else:
- vrflite_con["VRF_LITE_CONN"][0][
- "IF_NAME"
- ] = ext_values["IF_NAME"]
-
- if ad_l["dot1q"]:
- vrflite_con["VRF_LITE_CONN"][0][
- "DOT1Q_ID"
- ] = str(ad_l["dot1q"])
- else:
- vrflite_con["VRF_LITE_CONN"][0][
- "DOT1Q_ID"
- ] = str(ext_values["DOT1Q_ID"])
- if ad_l["ipv4_addr"]:
- vrflite_con["VRF_LITE_CONN"][0][
- "IP_MASK"
- ] = ad_l["ipv4_addr"]
- else:
- vrflite_con["VRF_LITE_CONN"][0][
- "IP_MASK"
- ] = ext_values["IP_MASK"]
+ if ad_l["dot1q"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "DOT1Q_ID"
+ ] = str(ad_l["dot1q"])
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "DOT1Q_ID"
+ ] = str(ext_values["DOT1Q_ID"])
+
+ if ad_l["ipv4_addr"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IP_MASK"
+ ] = ad_l["ipv4_addr"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IP_MASK"
+ ] = ext_values["IP_MASK"]
+
+ if ad_l["neighbor_ipv4"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "NEIGHBOR_IP"
+ ] = ad_l["neighbor_ipv4"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "NEIGHBOR_IP"
+ ] = ext_values["NEIGHBOR_IP"]
- if ad_l["neighbor_ipv4"]:
- vrflite_con["VRF_LITE_CONN"][0][
- "NEIGHBOR_IP"
- ] = ad_l["neighbor_ipv4"]
- else:
vrflite_con["VRF_LITE_CONN"][0][
- "NEIGHBOR_IP"
- ] = ext_values["NEIGHBOR_IP"]
+ "NEIGHBOR_ASN"
+ ] = ext_values["NEIGHBOR_ASN"]
+
+ if ad_l["ipv6_addr"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IPV6_MASK"
+ ] = ad_l["ipv6_addr"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IPV6_MASK"
+ ] = ext_values["IPV6_MASK"]
+
+ if ad_l["neighbor_ipv6"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IPV6_NEIGHBOR"
+ ] = ad_l["neighbor_ipv6"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "IPV6_NEIGHBOR"
+ ] = ext_values["IPV6_NEIGHBOR"]
- vrflite_con["VRF_LITE_CONN"][0][
- "NEIGHBOR_ASN"
- ] = ext_values["NEIGHBOR_ASN"]
-
- if ad_l["ipv6_addr"]:
- vrflite_con["VRF_LITE_CONN"][0][
- "IPV6_MASK"
- ] = ad_l["ipv6_addr"]
- else:
vrflite_con["VRF_LITE_CONN"][0][
- "IPV6_MASK"
- ] = ext_values["IPV6_MASK"]
+ "AUTO_VRF_LITE_FLAG"
+ ] = ext_values["AUTO_VRF_LITE_FLAG"]
+
+ if ad_l["peer_vrf"]:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "PEER_VRF_NAME"
+ ] = ad_l["peer_vrf"]
+ else:
+ vrflite_con["VRF_LITE_CONN"][0][
+ "PEER_VRF_NAME"
+ ] = ext_values["PEER_VRF_NAME"]
- if ad_l["neighbor_ipv6"]:
- vrflite_con["VRF_LITE_CONN"][0][
- "IPV6_NEIGHBOR"
- ] = ad_l["neighbor_ipv6"]
- else:
vrflite_con["VRF_LITE_CONN"][0][
- "IPV6_NEIGHBOR"
- ] = ext_values["IPV6_NEIGHBOR"]
+ "VRF_LITE_JYTHON_TEMPLATE"
+ ] = "Ext_VRF_Lite_Jython"
+ if (extension_values["VRF_LITE_CONN"]):
+ extension_values["VRF_LITE_CONN"]["VRF_LITE_CONN"].extend(vrflite_con["VRF_LITE_CONN"])
+ else:
+ extension_values["VRF_LITE_CONN"] = vrflite_con
- vrflite_con["VRF_LITE_CONN"][0][
- "AUTO_VRF_LITE_FLAG"
- ] = ext_values["AUTO_VRF_LITE_FLAG"]
- vrflite_con["VRF_LITE_CONN"][0][
- "PEER_VRF_NAME"
- ] = ad_l["peer_vrf"]
- vrflite_con["VRF_LITE_CONN"][0][
- "VRF_LITE_JYTHON_TEMPLATE"
- ] = "Ext_VRF_Lite_Jython"
- extension_values["VRF_LITE_CONN"] = json.dumps(
- vrflite_con
- )
+ ms_con = {}
+ ms_con["MULTISITE_CONN"] = []
+ extension_values["MULTISITE_CONN"] = json.dumps(
+ ms_con
+ )
- ms_con = {}
- ms_con["MULTISITE_CONN"] = []
- extension_values["MULTISITE_CONN"] = json.dumps(
- ms_con
- )
-
- v_a["extensionValues"] = json.dumps(
- extension_values
- ).replace(" ", "")
- v_a[
- "instanceValues"
- ] = '{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}'
- del v_a["vrf_lite"]
+ del ad_l
if ext_values is None:
- msg = "There is no VRF LITE capable interface on this witch {0}".format(
- v_a["ip_address"]
- )
+ for ip, ser in self.ip_sn.items():
+ if ser == v_a["serialNumber"]:
+ msg = "There is no VRF LITE capable interface on this switch {0}".format(
+ ip
+ )
self.module.fail_json(msg=msg)
+ else:
+ extension_values["VRF_LITE_CONN"] = json.dumps(
+ extension_values["VRF_LITE_CONN"]
+ )
+ v_a["extensionValues"] = json.dumps(
+ extension_values
+ ).replace(" ", "")
+ v_a[
+ "instanceValues"
+ ] = '{"loopbackId":"","loopbackIpAddress":"","loopbackIpV6Address":""}'
+ if v_a.get("vrf_lite", None) is not None:
+ del v_a["vrf_lite"]
else:
if v_a.get("vrf_lite", None) is not None:
@@ -2414,11 +2516,11 @@ def validate_input(self):
att_spec = dict(
ip_address=dict(required=True, type="str"),
deploy=dict(type="bool", default=True),
- vrf_lite=dict(type="list", default=[]),
+ vrf_lite=dict(type="list"),
)
lite_spec = dict(
- interface=dict(type="str"),
- peer_vrf=dict(required=True, type="str"),
+ interface=dict(required=True, type="str"),
+ peer_vrf=dict(type="str"),
ipv4_addr=dict(type="ipv4_subnet"),
neighbor_ipv4=dict(type="ipv4"),
ipv6_addr=dict(type="ipv6"),
@@ -2444,11 +2546,6 @@ def validate_input(self):
# msg = "ip_address and vlan_id are mandatory under attach parameters"
if "ip_address" not in attach:
msg = "ip_address is mandatory under attach parameters"
- if attach.get("vrf_lite"):
- for vl in attach["vrf_lite"]:
- if not vl.get("peer_vrf"):
- msg = "peer_vrf is mandatory under attach VRF LITE parameters"
-
else:
if state == "merged" or state == "overridden" or state == "replaced":
msg = "config: element is mandatory for this state {0}".format(
@@ -2550,7 +2647,7 @@ def validate_input(self):
)
lite_spec = dict(
interface=dict(type="str"),
- peer_vrf=dict(required=True, type="str"),
+ peer_vrf=dict(type="str"),
ipv4_addr=dict(type="ipv4_subnet"),
neighbor_ipv4=dict(type="ipv4"),
ipv6_addr=dict(type="ipv6"),
diff --git a/tests/integration/targets/dcnm_links/tests/dcnm/dcnm_links_misc.yaml b/tests/integration/targets/dcnm_links/tests/dcnm/dcnm_links_misc.yaml
new file mode 100644
index 000000000..3091b795e
--- /dev/null
+++ b/tests/integration/targets/dcnm_links/tests/dcnm/dcnm_links_misc.yaml
@@ -0,0 +1,900 @@
+##############################################
+## SETUP ##
+##############################################
+
+- name: Remove local log file
+ local_action: command rm -f dcnm_links.log
+
+- block:
+
+#############################################
+# DELETE ##
+#############################################
+
+ - name: Initial setup - Delete Links on numbered fabric
+ cisco.dcnm.dcnm_links: &links_delete
+ state: deleted # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_num_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_3 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_3 }}" # Interface on the Destination fabric
+ src_device: "dummy-switch-1" # Device on the Source fabric
+ dst_device: "{{ ansible_num_switch1 }}" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_3 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_3 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_4 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_4 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw100" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_5 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_5 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_6 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_6 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_7 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_7 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_8 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_8 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw4" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_9 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_9 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_10 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_10 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-ext-sw1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_11 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_11 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ - 'item["RETURN_CODE"] == 200'
+ loop: '{{ result.response }}'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Source Fabric in Monitoring mode
+ cisco.dcnm.dcnm_links:
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_extmon_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_num_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_3 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_3 }}" # Interface on the Destination fabric
+ src_device: "dummy-switch-1" # Device on the Source fabric
+ dst_device: "{{ ansible_num_switch1 }}" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.1.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.1.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_extmon_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_num_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - '("Monitoring mode" in result["msg"])'
+ - '("No changes are allowed on the fabric" in result["msg"])'
+ - '("{{ ansible_extmon_fabric }}" in result["msg"])'
+
+ - name: Create Links - Destination Fabric - Monitoring, Source Switches Not Manageable and Destination Switches Not Manageable (in POAP)
+ cisco.dcnm.dcnm_links: &links_create1
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_3 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_3 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.2.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.2.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+############################################
+# IDEMPOTENCE #
+############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create1
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_3 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_3 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Destination Fabric - Monitoring, Source Switches Not Manageable and Destination Switches Not Manageable (not present)
+ cisco.dcnm.dcnm_links: &links_create2
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_4 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_4 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw100" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.3.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.3.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ peer1_description: "Description of source" # optional, default is ""
+ peer2_description: "Description of dest" # optional, default is ""
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+ - name: Merge Links - Destination Fabric - Monitoring, Source Switches Not Manageable and Destination Switches Not Manageable (not present)
+ cisco.dcnm.dcnm_links:
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_4 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_4 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw100" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.20.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.20.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ peer1_description: "Description of source - REP" # optional, default is ""
+ peer2_description: "Description of dest - REP" # optional, default is ""
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 1'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+ - name: Replace Links - Destination Fabric - Monitoring, Source Switches Not Manageable and Destination Switches Not Manageable (not present)
+ cisco.dcnm.dcnm_links:
+ state: replaced # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_4 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_4 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw100" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.20.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.20.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ peer1_description: "Description of source - REP" # optional, default is ""
+ peer2_description: "Description of dest - REP" # optional, default is ""
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 1'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create2
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 1'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_4 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_4 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw100" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Destination Fabric - Monitoring, Source Switches Not Manageable and Destination Switches Manageable
+ cisco.dcnm.dcnm_links: &links_create3
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_5 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_5 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.4.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.4.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create3
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_5 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_5 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Destination Fabric - Monitoring, Source Switches Manageable and Destination Switches Not Manageable
+ cisco.dcnm.dcnm_links: &links_create4
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_6 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_6 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.5.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.5.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create4
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_6 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_6 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw3" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Destination Fabric - Monitoring, Source Switches Manageable and Destination Switches Manageable
+ cisco.dcnm.dcnm_links: &links_create5
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_7 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_7 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.6.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.6.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_svi_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create5
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_svi_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_svi_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_7 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_7 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-svi1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Source Switches Not Manageable and Destination Switches Not Manageable
+ cisco.dcnm.dcnm_links: &links_create6
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_8 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_8 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw4" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.7.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.7.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_ext_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_ext_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create6
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_ext_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_8 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_8 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-test-sw4" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Source Switches Not Manageable and Destination Switches Manageable
+ cisco.dcnm.dcnm_links: &links_create7
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_9 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_9 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.8.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.8.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_unnum_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_unnum_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create7
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_unnum_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_9 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_9 }}" # Interface on the Destination fabric
+ src_device: "n9kv-test-sw1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Source Switches Manageable and Destination Switches Not Manageable (not present in Fabric)
+ cisco.dcnm.dcnm_links: &links_create8
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_10 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_10 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-ext-sw1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.9.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.9.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_ext_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_ext_fabric }}" ] | length) == 0'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create8
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_ext_fabric }}" ] | length) == 0'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_ext_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_10 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_10 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-ext-sw1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# MERGE ##
+#############################################
+
+ - name: Create Links - Source Switches Manageable and Destination Switches Manageable
+ cisco.dcnm.dcnm_links: &links_create9
+ state: merged # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_11 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_11 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ profile:
+ ipv4_subnet: 193.168.10.1/24 # IP address of interface in src fabric with mask
+ neighbor_ip: 193.168.10.2 # IP address of the interface in dst fabric
+ src_asn: "{{ ansible_num_asn }}" # BGP ASN in source fabric
+ dst_asn: "{{ ansible_unnum_asn }}" # BGP ASN in destination fabric
+ register: result
+ ignore_errors: yes
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 1'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_unnum_fabric }}" ] | length) == 1'
+
+#############################################
+# IDEMPOTENCE ##
+#############################################
+
+ - name: Create Links - Idempotence
+ cisco.dcnm.dcnm_links: *links_create9
+ register: result
+
+ - assert:
+ that:
+ - 'result.changed == true'
+ - '(result["diff"][0]["merged"] | length) == 0'
+ - '(result["diff"][0]["modified"] | length) == 0'
+ - '(result["diff"][0]["deleted"] | length) == 0'
+ - '(result["diff"][0]["query"] | length) == 0'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_num_fabric }}" ] | length) == 1'
+ - '(result["diff"][0]["deploy"][0][ "{{ ansible_unnum_fabric }}" ] | length) == 1'
+
+###############################################
+### QUERY ##
+###############################################
+
+ - name: Query Links
+ cisco.dcnm.dcnm_links:
+ state: query # choose from [merged, replaced, deleted, query]
+ src_fabric: "{{ ansible_num_fabric }}"
+ config:
+ - dst_fabric: "{{ ansible_unnum_fabric }}" # Destination fabric
+ src_interface: "{{ intf_1_11 }}" # Interface on the Source fabric
+ dst_interface: "{{ intf_1_11 }}" # Interface on the Destination fabric
+ src_device: "n9kv-num-1" # Device on the Source fabric
+ dst_device: "n9kv-unnum-1" # Device on the Destination fabric
+ template: ext_fabric_setup # template to be applied, choose from
+ # [ ext_fabric_setup, ext_multisite_underlay_setup,
+ # ext_evpn_multisite_overlay_setup ]
+ register: result
+
+ - assert:
+ that:
+ '(result["response"] | length) >= 1'
+
+#############################################
+# CLEANUP ##
+#############################################
+
+ always:
+
+ - name: Cleanup - Delete Links
+ cisco.dcnm.dcnm_links: *links_delete
+ register: result
+ when: IT_CONTEXT is not defined
+
+ - assert:
+ that:
+ - 'item["RETURN_CODE"] == 200'
+ loop: '{{ result.response }}'
+ when: IT_CONTEXT is not defined
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/deleted.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/deleted.yaml
index 0f7b08217..489b94c8c 100644
--- a/tests/integration/targets/dcnm_vrf/tests/dcnm/deleted.yaml
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/deleted.yaml
@@ -35,8 +35,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -80,8 +78,6 @@
vrf_id: 9008011
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
- source: null
- service_vrf_template: null
register: result
- assert:
@@ -118,14 +114,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -168,14 +162,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -208,6 +200,13 @@
- 'result.response|length == 0'
- 'result.diff|length == 0'
+- name: DELETED - sleep for 40 seconds for DCNM to completely update the state
+ # The vrf lite profile removal returns ok for deployment, but the switch takes time to remove
+ # the profile so wait for some time before creating a new vrf, else the switch goes into
+ # OUT-OF-SYNC state
+ wait_for:
+ timeout: 40
+
- name: DELETED - Create, Attach and Deploy new VRF - VLAN/VRF LITE EXTENSION Provided by the User in one switch
cisco.dcnm.dcnm_vrf:
fabric: "{{ test_fabric }}"
@@ -218,14 +217,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/merged.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/merged.yaml
index 7ee0dbd0c..f2d7afbc8 100644
--- a/tests/integration/targets/dcnm_vrf/tests/dcnm/merged.yaml
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/merged.yaml
@@ -39,8 +39,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -94,8 +92,6 @@
vrf_id: 9008011
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -150,14 +146,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -204,7 +198,14 @@
fabric: "{{ test_fabric }}"
state: deleted
-- name: MERGED - Create, Attach and Deploy new VRF - VRF/VRF LITE EXTENSION Provided by the DCNM - Only Mandatory option - Rest are populated from DCNM
+- name: MERGED - sleep for 40 seconds for DCNM to completely remove lite profile
+ # The vrf lite profile removal returns ok for deployment, but the switch takes time to remove
+ # the profile so wait for some time before creating a new vrf, else the switch goes into
+ # OUT-OF-SYNC state
+ wait_for:
+ timeout: 40
+
+- name: MERGED - Create, Attach and Deploy new VRF - VRF/VRF LITE EXTENSION Provided by the DCNM - one optional - Rest are populated from DCNM
cisco.dcnm.dcnm_vrf:
fabric: "{{ test_fabric }}"
state: merged
@@ -214,13 +215,11 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- - ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
+ - interface: "{{ ansible_int1 }}" # mandatory
+ - ip_address: "{{ ansible_switch1 }}"
deploy: true
register: result
@@ -244,8 +243,8 @@
- '(result.response[1].DATA|dict2items)[1].value == "SUCCESS"'
- 'result.diff[0].attach[0].deploy == true'
- 'result.diff[0].attach[1].deploy == true'
- - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address'
- - '"{{ ansible_switch2 }}" in result.diff[0].attach[1].ip_address'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address'
+ - '"{{ ansible_switch1 }}" in result.diff[0].attach[1].ip_address'
- 'result.diff[0].vrf_name == "ansible-vrf-int1"'
- name: MERGED - Create, Attach and Deploy new VRF - Update with incorrect VRF ID.
@@ -257,8 +256,6 @@
vrf_id: 9008012
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -280,8 +277,6 @@
vrf_id: 9008012000000000
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -294,7 +289,7 @@
- 'result.changed == false'
- '"The item exceeds the allowed range of max" in result.msg'
-- name: MERGED - Create, Attach and Deploy new VRF - Try configuring VRF LITE without mandatory parameter
+- name: MERGED - Create, Attach and Deploy new VRF - Try configuring VRF LITE without required parameter
cisco.dcnm.dcnm_vrf:
fabric: "{{ test_fabric }}"
state: merged
@@ -304,13 +299,11 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - interface: "{{ ansible_int1 }}"
+ - peer_vrf: ansible-vrf-int1 # optional
deploy: true
register: result
ignore_errors: yes
@@ -318,7 +311,7 @@
- assert:
that:
- 'result.changed == false'
- - '"peer_vrf is mandatory under attach VRF LITE parameters" in result.msg'
+ - '"Invalid parameters in playbook: interface : Required parameter not found" in result.msg'
- name: MERGED - Create, Attach and Deploy new VRF - Try configuring VRF LITE to a non border switch
cisco.dcnm.dcnm_vrf:
@@ -330,13 +323,11 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/overridden.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/overridden.yaml
index 31dc6f8ca..1cee0dca7 100644
--- a/tests/integration/targets/dcnm_vrf/tests/dcnm/overridden.yaml
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/overridden.yaml
@@ -35,8 +35,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -81,8 +79,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -143,15 +139,13 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 1500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int2 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
- ipv4_addr: 10.33.0.2/30 # optional
+ - peer_vrf: ansible-vrf-int2 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
neighbor_ipv6: 2010::10:34:0:3 # optional
@@ -193,15 +187,13 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 1500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int2 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
- ipv4_addr: 10.33.0.7/30 # optional
+ - peer_vrf: ansible-vrf-int2 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.6/24 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:10/64 # optional
neighbor_ipv6: 2010::10:34:0:7 # optional
@@ -247,15 +239,13 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
- ipv4_addr: 10.33.0.1/30 # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.3/24 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:1/64 # optional
neighbor_ipv6: 2010::10:34:0:2 # optional
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/query.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/query.yaml
index f30d77e4d..76115d8d5 100644
--- a/tests/integration/targets/dcnm_vrf/tests/dcnm/query.yaml
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/query.yaml
@@ -35,8 +35,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -81,8 +79,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -132,14 +128,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 1500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- peer_vrf: ansible-vrf-int2 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -182,14 +176,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 1500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- peer_vrf: ansible-vrf-int2 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -240,8 +232,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/replaced.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/replaced.yaml
index 41132de15..0a555609c 100644
--- a/tests/integration/targets/dcnm_vrf/tests/dcnm/replaced.yaml
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/replaced.yaml
@@ -35,8 +35,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -79,8 +77,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
register: result
- name: Query fabric state until vrfStatus transitions to DEPLOYED state
@@ -89,7 +85,7 @@
state: query
register: query_result
until:
- - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ - "query_result.response[0].parent.vrfStatus is search('NA')"
retries: 30
delay: 2
@@ -122,8 +118,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
@@ -176,14 +170,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
@@ -226,8 +218,6 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
deploy: true
@@ -260,6 +250,13 @@
that:
- 'result.changed == false'
+- name: REPLACED - sleep for 40 seconds for DCNM to completely update the state
+ # The vrf lite profile removal returns ok for deployment, but the switch takes time to remove
+ # the profile so wait for some time before creating a new vrf, else the switch goes into
+ # OUT-OF-SYNC state
+ wait_for:
+ timeout: 40
+
- name: REPLACED - Update existing VRF LITE extensions using Replace - Create VRF LITE Attachment Only
cisco.dcnm.dcnm_vrf: &conf4
fabric: "{{ test_fabric }}"
@@ -270,14 +267,12 @@
vrf_template: Default_VRF_Universal
vrf_extension_template: Default_VRF_Extension_Universal
vlan_id: 500
- source: null
- service_vrf_template: null
attach:
- ip_address: "{{ ansible_switch1 }}"
- ip_address: "{{ ansible_switch2 }}"
vrf_lite:
- - peer_vrf: ansible-vrf-int1 # peer_vrf is mandatory
- interface: "{{ ansible_int1 }}" # optional
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
ipv4_addr: 10.33.0.2/30 # optional
neighbor_ipv4: 10.33.0.1 # optional
ipv6_addr: 2010::10:34:0:7/64 # optional
diff --git a/tests/integration/targets/dcnm_vrf/tests/dcnm/self-contained-tests/vrf_lite.yaml b/tests/integration/targets/dcnm_vrf/tests/dcnm/self-contained-tests/vrf_lite.yaml
new file mode 100644
index 000000000..6ad41f906
--- /dev/null
+++ b/tests/integration/targets/dcnm_vrf/tests/dcnm/self-contained-tests/vrf_lite.yaml
@@ -0,0 +1,393 @@
+##############################################
+## SETUP ##
+##############################################
+
+- set_fact:
+ rest_path: "/rest/control/fabrics/{{ test_fabric }}"
+ when: controller_version == "11"
+
+- set_fact:
+ rest_path: "/appcenter/cisco/ndfc/api/v1/lan-fabric/rest/control/fabrics/{{ test_fabric }}"
+ when: controller_version >= "12"
+
+- name: VRF LITE - Verify if fabric is deployed.
+ cisco.dcnm.dcnm_rest:
+ method: GET
+ path: "{{ rest_path }}"
+ register: result
+
+- assert:
+ that:
+ - 'result.response.DATA != None'
+
+- name: VRF LITE - Clean up any existing vrfs
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: deleted
+
+- name: VRF LITE - sleep for 40 seconds for DCNM to completely update the state
+ # The vrf lite profile removal returns ok for deployment, but the switch takes time to remove
+ # the profile so wait for some time before creating a new vrf, else the switch goes into
+ # OUT-OF-SYNC state
+ wait_for:
+ timeout: 40
+
+###############################################
+### TESTS ##
+###############################################
+
+- name: VRF LITE - Create, Attach and Deploy new VRF - VLAN/VRF LITE EXTENSION Provided by the User in one switch
+ cisco.dcnm.dcnm_vrf: &conf1
+ fabric: "{{ test_fabric }}"
+ state: merged
+ config:
+ - vrf_name: ansible-vrf-int1
+ vrf_id: 9008011
+ vrf_template: Default_VRF_Universal
+ vrf_extension_template: Default_VRF_Extension_Universal
+ vlan_id: 500
+ attach:
+ - ip_address: "{{ ansible_switch1 }}"
+ - ip_address: "{{ ansible_switch2 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
+ neighbor_ipv4: 10.33.0.1 # optional
+ ipv6_addr: 2010::10:34:0:7/64 # optional
+ neighbor_ipv6: 2010::10:34:0:3 # optional
+ dot1q: 2 # dot1q can be got from dcnm
+ deploy: true
+ register: result
+
+- name: Query fabric state until vrfStatus transitions to DEPLOYED state
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: query
+ register: query_result
+ until:
+ - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ retries: 30
+ delay: 2
+
+- assert:
+ that:
+ - 'result.changed == true'
+ - 'result.response[0].RETURN_CODE == 200'
+ - 'result.response[1].RETURN_CODE == 200'
+ - 'result.response[2].RETURN_CODE == 200'
+ - '(result.response[1].DATA|dict2items)[0].value == "SUCCESS"'
+ - '(result.response[1].DATA|dict2items)[1].value == "SUCCESS"'
+ - 'result.diff[0].attach[0].deploy == true'
+ - 'result.diff[0].attach[1].deploy == true'
+ - '"{{ ansible_switch1 }}" in result.diff[0].attach[0].ip_address'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[1].ip_address'
+ - 'result.diff[0].vrf_name == "ansible-vrf-int1"'
+ - '"{{ ansible_int1 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int1" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"10.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+
+- name: VRF LITE - conf1 - Idempotence
+ cisco.dcnm.dcnm_vrf: *conf1
+ register: result
+
+- assert:
+ that:
+ - 'result.changed == false'
+ - 'result.response|length == 0'
+
+- name: VRF LITE - Attach and Deploy a second VRF LITE EXTENSION Provided by the User in one switch
+ cisco.dcnm.dcnm_vrf: &conf2
+ fabric: "{{ test_fabric }}"
+ state: merged
+ config:
+ - vrf_name: ansible-vrf-int1
+ vrf_id: 9008011
+ vrf_template: Default_VRF_Universal
+ vrf_extension_template: Default_VRF_Extension_Universal
+ vlan_id: 500
+ attach:
+ - ip_address: "{{ ansible_switch2 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
+ neighbor_ipv4: 10.33.0.1 # optional
+ ipv6_addr: 2010::10:34:0:7/64 # optional
+ neighbor_ipv6: 2010::10:34:0:3 # optional
+ dot1q: 2 # dot1q can be got from dcnm
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int2 }}" # mandatory
+ ipv4_addr: 20.33.0.2/24 # optional
+ neighbor_ipv4: 20.33.0.1 # optional
+ ipv6_addr: 3010::10:34:0:7/64 # optional
+ neighbor_ipv6: 3010::10:34:0:3 # optional
+ dot1q: 21 # dot1q can be got from dcnm
+ - ip_address: "{{ ansible_switch1 }}"
+ deploy: true
+ register: result
+
+- name: Query fabric state until vrfStatus transitions to DEPLOYED state
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: query
+ register: query_result
+ until:
+ - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ retries: 30
+ delay: 2
+
+- assert:
+ that:
+ - 'result.changed == true'
+ - 'result.response[0].RETURN_CODE == 200'
+ - 'result.response[1].RETURN_CODE == 200'
+ - '(result.response[0].DATA|dict2items)[0].value == "SUCCESS"'
+ - 'result.diff[0].attach[0].deploy == true'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address'
+ - 'result.diff[0].vrf_name == "ansible-vrf-int1"'
+ - '"{{ ansible_int1 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int1" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"10.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"{{ ansible_int2 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"20.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+
+- name: VRF LITE - conf2 - Idempotence
+ cisco.dcnm.dcnm_vrf: *conf2
+ register: result
+
+- assert:
+ that:
+ - 'result.changed == false'
+ - 'result.response|length == 0'
+
+- name: VRF LITE - Replace and Deploy VRF LITE Attachment Provided by the User in one switch
+ cisco.dcnm.dcnm_vrf: &conf3
+ fabric: "{{ test_fabric }}"
+ state: replaced
+ config:
+ - vrf_name: ansible-vrf-int1
+ vrf_id: 9008011
+ vrf_template: Default_VRF_Universal
+ vrf_extension_template: Default_VRF_Extension_Universal
+ vlan_id: 500
+ attach:
+ - ip_address: "{{ ansible_switch1 }}"
+ - ip_address: "{{ ansible_switch2 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
+ neighbor_ipv4: 10.33.0.1 # optional
+ ipv6_addr: 2010::10:34:0:7/64 # optional
+ neighbor_ipv6: 2010::10:34:0:3 # optional
+ dot1q: 2 # dot1q can be got from dcnm
+ deploy: true
+ register: result
+
+- name: Query fabric state until vrfStatus transitions to DEPLOYED state
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: query
+ register: query_result
+ until:
+ - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ retries: 30
+ delay: 2
+
+- assert:
+ that:
+ - 'result.changed == true'
+ - 'result.response[0].RETURN_CODE == 200'
+ - 'result.response[1].RETURN_CODE == 200'
+ - '(result.response[0].DATA|dict2items)[0].value == "SUCCESS"'
+ - 'result.diff[0].attach[0].deploy == true'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address'
+ - 'result.diff[0].vrf_name == "ansible-vrf-int1"'
+ - '"{{ ansible_int1 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int1" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"10.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+
+- name: VRF LITE - conf3 - Idempotence
+ cisco.dcnm.dcnm_vrf: *conf3
+ register: result
+
+- assert:
+ that:
+ - 'result.changed == false'
+ - 'result.response|length == 0'
+
+- name: VRF LITE - Override VRF and VRF LITE EXTENSION Provided by the User
+ cisco.dcnm.dcnm_vrf: &conf4
+ fabric: "{{ test_fabric }}"
+ state: overridden
+ config:
+ - vrf_name: ansible-vrf-int2
+ vrf_id: 9008013
+ vrf_template: Default_VRF_Universal
+ vrf_extension_template: Default_VRF_Extension_Universal
+ vlan_id: 400
+ attach:
+ - ip_address: "{{ ansible_switch2 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
+ neighbor_ipv4: 10.33.0.1 # optional
+ ipv6_addr: 2010::10:34:0:7/64 # optional
+ neighbor_ipv6: 2010::10:34:0:3 # optional
+ dot1q: 2 # dot1q can be got from dcnm
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int2 }}" # mandatory
+ ipv4_addr: 20.33.0.2/24 # optional
+ neighbor_ipv4: 20.33.0.1 # optional
+ ipv6_addr: 3010::10:34:0:7/64 # optional
+ neighbor_ipv6: 3010::10:34:0:3 # optional
+ dot1q: 21 # dot1q can be got from dcnm
+ deploy: true
+ register: result
+
+- name: Query fabric state until vrfStatus transitions to DEPLOYED state
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: query
+ register: query_result
+ until:
+ - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ retries: 30
+ delay: 2
+
+- assert:
+ that:
+ - 'result.changed == true'
+ - 'result.response[0].RETURN_CODE == 200'
+ - 'result.response[1].RETURN_CODE == 200'
+ - 'result.response[2].RETURN_CODE == 200'
+ - 'result.response[3].RETURN_CODE == 200'
+ - 'result.response[4].RETURN_CODE == 200'
+ - 'result.response[5].RETURN_CODE == 200'
+ - '(result.response[0].DATA|dict2items)[0].value == "SUCCESS"'
+ - '(result.response[0].DATA|dict2items)[1].value == "SUCCESS"'
+ - 'result.diff[0].attach[0].deploy == true'
+ - 'result.diff[1].attach[0].deploy == false'
+ - 'result.diff[1].attach[1].deploy == false'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address'
+ - '"{{ ansible_switch1 }}" in result.diff[1].attach[0].ip_address'
+ - '"{{ ansible_switch2 }}" in result.diff[1].attach[1].ip_address'
+ - 'result.diff[0].vrf_name == "ansible-vrf-int2"'
+ - 'result.diff[1].vrf_name == "ansible-vrf-int1"'
+ - '"{{ ansible_int1 }}" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int1" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"10.33.0.2/24" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"{{ ansible_int2 }}" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"20.33.0.2/24" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+
+- name: VRF LITE - conf4 - Idempotence
+ cisco.dcnm.dcnm_vrf: *conf4
+ register: result
+
+- assert:
+ that:
+ - 'result.changed == false'
+ - 'result.response|length == 0'
+
+- name: VRF LITE - Clean up any existing vrfs
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: deleted
+
+- name: VRF LITE - sleep for 40 seconds for DCNM to completely update the state
+ # The vrf lite profile removal returns ok for deployment, but the switch takes time to remove
+ # the profile so wait for some time before creating a new vrf, else the switch goes into
+ # OUT-OF-SYNC state
+ wait_for:
+ timeout: 40
+
+- name: VRF LITE - Create, Attach and Deploy new VRF - VLAN/VRF LITE EXTENSION Provided by the User in multiple switches
+ cisco.dcnm.dcnm_vrf: &conf5
+ fabric: "{{ test_fabric }}"
+ state: merged
+ config:
+ - vrf_name: ansible-vrf-int2
+ vrf_id: 9008015
+ vrf_template: Default_VRF_Universal
+ vrf_extension_template: Default_VRF_Extension_Universal
+ vlan_id: 400
+ attach:
+ - ip_address: "{{ ansible_switch2 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int1 }}" # mandatory
+ ipv4_addr: 10.33.0.2/24 # optional
+ neighbor_ipv4: 10.33.0.1 # optional
+ ipv6_addr: 2010::10:34:0:7/64 # optional
+ neighbor_ipv6: 2010::10:34:0:3 # optional
+ dot1q: 2 # dot1q can be got from dcnm
+ - peer_vrf: ansible-vrf-int1 # optional
+ interface: "{{ ansible_int2 }}" # mandatory
+ ipv4_addr: 20.33.0.2/24 # optional
+ neighbor_ipv4: 20.33.0.1 # optional
+ ipv6_addr: 3010::10:34:0:7/64 # optional
+ neighbor_ipv6: 3010::10:34:0:3 # optional
+ dot1q: 21 # dot1q can be got from dcnm
+ - ip_address: "{{ ansible_switch3 }}"
+ vrf_lite:
+ - peer_vrf: ansible-vrf-int3 # optional
+ interface: "{{ ansible_int3 }}" # mandatory
+ ipv4_addr: 40.33.0.2/24 # optional
+ neighbor_ipv4: 40.33.0.1 # optional
+ ipv6_addr: 5010::10:34:0:7/64 # optional
+ neighbor_ipv6: 5010::10:34:0:3 # optional
+ dot1q: 4 # dot1q can be got from dcnm
+ deploy: true
+ register: result
+
+- name: Query fabric state until vrfStatus transitions to DEPLOYED state
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: query
+ register: query_result
+ until:
+ - "query_result.response[0].parent.vrfStatus is search('DEPLOYED')"
+ retries: 30
+ delay: 2
+
+- assert:
+ that:
+ - 'result.changed == true'
+ - 'result.response[0].RETURN_CODE == 200'
+ - 'result.response[1].RETURN_CODE == 200'
+ - 'result.response[2].RETURN_CODE == 200'
+ - '(result.response[1].DATA|dict2items)[0].value == "SUCCESS"'
+ - '(result.response[1].DATA|dict2items)[1].value == "SUCCESS"'
+ - 'result.diff[0].attach[0].deploy == true'
+ - 'result.diff[0].attach[1].deploy == true'
+ - '"{{ ansible_switch2 }}" in result.diff[0].attach[0].ip_address'
+ - '"{{ ansible_switch3 }}" in result.diff[0].attach[1].ip_address'
+ - 'result.diff[0].vrf_name == "ansible-vrf-int2"'
+ - '"{{ ansible_int3 }}" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int3" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"40.33.0.2/24" in query_result.response[0].attach[0].switchDetailsList[0].extensionValues'
+ - '"{{ ansible_int1 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"ansible-vrf-int1" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"10.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"{{ ansible_int2 }}" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+ - '"20.33.0.2/24" in query_result.response[0].attach[1].switchDetailsList[0].extensionValues'
+
+- name: VRF LITE - conf5 - Idempotence
+ cisco.dcnm.dcnm_vrf: *conf5
+ register: result
+
+- assert:
+ that:
+ - 'result.changed == false'
+ - 'result.response|length == 0'
+
+###############################################
+### CLEAN-UP ##
+###############################################
+
+- name: VRF LITE - Clean up any existing vrfs
+ cisco.dcnm.dcnm_vrf:
+ fabric: "{{ test_fabric }}"
+ state: deleted
diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_links_configs.json b/tests/unit/modules/dcnm/fixtures/dcnm_links_configs.json
index e2e4d3143..93d55c289 100644
--- a/tests/unit/modules/dcnm/fixtures/dcnm_links_configs.json
+++ b/tests/unit/modules/dcnm/fixtures/dcnm_links_configs.json
@@ -2199,5 +2199,149 @@
"src_interface": "Ethernet1/1",
"template": "ext_evpn_multisite_overlay_setup"
}
- ]
+ ],
+
+ "inter_src_fab_ro_config": [
+ {
+ "dst_device": "10.64.78.231",
+ "dst_fabric": "mmudigon-dst-fab-ro",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "10.64.78.227",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_ro_dst_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-1",
+ "dst_fabric": "mmudigon-dst-fab-ro",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-num-1",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_ro_src_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-num-1",
+ "dst_fabric": "mmudigon-dst-fab-ro",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-2",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_ro_src_dst_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-1",
+ "dst_fabric": "mmudigon-dst-fab-ro",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-2",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_ro_src_dst_sw_mgbl_config": [
+ {
+ "dst_device": "n9kv-num-1",
+ "dst_fabric": "mmudigon-dst-fab-ro",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-unnum-1",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_rw_dst_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-1",
+ "dst_fabric": "mmudigon-dst-fab-rw",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-num-1",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_rw_src_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-num-1",
+ "dst_fabric": "mmudigon-dst-fab-rw",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-2",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_rw_src_dst_sw_non_mgbl_config": [
+ {
+ "dst_device": "n9kv-1",
+ "dst_fabric": "mmudigon-dst-fab-rw",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-2",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }],
+
+ "inter_dst_fab_rw_src_dst_sw_mgbl_config": [
+ {
+ "dst_device": "n9kv-num-1",
+ "dst_fabric": "mmudigon-dst-fab-rw",
+ "dst_interface": "Ethernet1/1",
+ "profile": {
+ "dst_asn": 1001,
+ "ipv4_subnet": "193.168.1.1/24",
+ "neighbor_ip": "193.168.1.2",
+ "src_asn": 1000
+ },
+ "src_device": "n9kv-unnum-1",
+ "src_interface": "Ethernet1/1",
+ "template": "ext_fabric_setup"
+ }]
}
diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_links_payloads.json b/tests/unit/modules/dcnm/fixtures/dcnm_links_payloads.json
index 9c1c343c4..cfe8d748f 100644
--- a/tests/unit/modules/dcnm/fixtures/dcnm_links_payloads.json
+++ b/tests/unit/modules/dcnm/fixtures/dcnm_links_payloads.json
@@ -19,60 +19,178 @@
"mock_fab_inv_data": {
"192.168.123.150": {
+ "logicalName": "n9kv-100",
+ "serialNumber": "9M99N34RDED",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.151": {
+ "logicalName": "n9kv-200",
+ "serialNumber": "9NXHSNTEO6C",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.156": {
+ "logicalName": "n9kv-ipv6-1",
+ "serialNumber": "9BH0813WFWT",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.157": {
+ "logicalName": "n9kv-ipv6-2",
+ "serialNumber": "9ITWBH9OIAH",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.160": {
+ "logicalName": "n9kv-num-1",
+ "serialNumber": "9IF87L089SZ",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.161": {
+ "logicalName": "n9kv-num-2",
+ "serialNumber": "9FX7O3TU2QM",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.170": {
+ "logicalName": "n9kv-unnum-1",
+ "serialNumber": "9EFX823RUL3",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"192.168.123.171": {
+ "logicalName": "n9kv-unnum-2",
+ "serialNumber": "9AF3VNZYAKS",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.225": {
+ "logicalName": "n9kv-test1",
+ "serialNumber": "98YWRN9WCSC",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.226": {
+ "logicalName": "n9kv-test2",
+ "serialNumber": "94UTIRVSX58",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.227": {
+ "logicalName": "n9kv-227",
+ "serialNumber": "953E68OKK1L",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.228": {
+ "logicalName": "n9kv-228",
+ "serialNumber": "9WCPR0JUV6M",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.230": {
+ "logicalName": "test11",
+ "serialNumber": "9XLP8I4TPPM",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
},
"10.64.78.231": {
+ "logicalName": "test22",
+ "serialNumber": "9E15XSEM5MS",
"isVpcConfigured": "True",
- "vpcDomain": 1
+ "vpcDomain": 1,
+ "switchRoleEnum": "Leaf",
+ "managable": "True"
+ },
+ "10.69.69.1": {
+ "logicalName": "n9kv-1",
+ "serialNumber": "TEST-SNO-1",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ },
+ "10.69.69.2": {
+ "logicalName": "n9kv-2",
+ "serialNumber": "TEST-SNO-2",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ },
+ "10.69.69.3": {
+ "logicalName": "n9kv-3",
+ "serialNumber": "TEST-SNO-3",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ },
+ "10.69.69.4": {
+ "logicalName": "n9kv-4",
+ "serialNumber": "TEST-SNO-4",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ },
+ "10.69.69.5": {
+ "logicalName": "n9kv-5",
+ "serialNumber": "TEST-SNO-5",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ },
+ "10.69.69.6": {
+ "logicalName": "n9kv-6",
+ "serialNumber": "TEST-SNO-6",
+ "isVpcConfigured": "True",
+ "vpcDomain": 1,
+ "switchRoleEnum": "None",
+ "managable": "False"
+ }
+ },
+
+ "mock_monitor_true_resp": {
+ "RETURN_CODE": 200,
+ "DATA":{
+ "readonly": "True"
+ }
+ },
+
+ "mock_monitor_false_resp": {
+ "RETURN_CODE": 200,
+ "DATA":{
+ "readonly": "False"
}
},
@@ -211,6 +329,7 @@
"sw2-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-num-2",
"sw-serial-number": "9FX7O3TU2QM"
},
"link-dbid": 1856090,
@@ -241,6 +360,7 @@
"sw1-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-num-1",
"sw-serial-number": "9IF87L089SZ"
},
"fabricName": "mmudigon-numbered"
@@ -258,6 +378,7 @@
"sw2-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-num-2",
"sw-serial-number": "9FX7O3TU2QM"
},
"link-dbid": 1856250,
@@ -268,6 +389,7 @@
"sw1-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-num-1",
"sw-serial-number": "9IF87L089SZ"
},
"fabricName": "mmudigon-numbered"
@@ -285,6 +407,7 @@
"sw2-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/3",
+ "sw-sys-name": "n9kv-num-2",
"sw-serial-number": "9FX7O3TU2QM"
},
"nvPairs": {
@@ -309,6 +432,7 @@
"sw1-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/3",
+ "sw-sys-name": "n9kv-num-1",
"sw-serial-number": "9IF87L089SZ"
},
"fabricName": "mmudigon-numbered"
@@ -326,6 +450,7 @@
"sw2-info": {
"fabric-name": "mmudigon-unnumbered",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-unnum-2",
"sw-serial-number": "9AF3VNZYAKS"
},
"nvPairs": {
@@ -351,6 +476,7 @@
"sw1-info": {
"fabric-name": "mmudigon-unnumbered",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-unnum-1",
"sw-serial-number": "9EFX823RUL3"
},
"fabricName": "mmudigon-unnumbered"
@@ -368,6 +494,7 @@
"sw2-info": {
"fabric-name": "mmudigon-unnumbered",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-unnum-2",
"sw-serial-number": "9AF3VNZYAKS"
},
"nvPairs": {
@@ -376,6 +503,7 @@
"sw1-info": {
"fabric-name": "mmudigon-unnumbered",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-unnum-1",
"sw-serial-number": "9EFX823RUL3"
},
"fabricName": "mmudigon-unnumbered"
@@ -393,6 +521,7 @@
"sw2-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-ipv6-2",
"sw-serial-number": "9ITWBH9OIAH"
},
"nvPairs": {
@@ -418,6 +547,7 @@
"sw1-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/1",
+ "sw-sys-name": "n9kv-ipv6-1",
"sw-serial-number": "9BH0813WFWT"
},
"fabricName": "mmudigon-ipv6-underlay"
@@ -435,6 +565,7 @@
"sw2-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-ipv6-2",
"sw-serial-number": "9ITWBH9OIAH"
},
"link-dbid": 1859550,
@@ -444,6 +575,7 @@
"sw1-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/2",
+ "sw-sys-name": "n9kv-ipv6-1",
"sw-serial-number": "9BH0813WFWT"
},
"fabricName": "mmudigon-ipv6-underlay"
@@ -461,6 +593,7 @@
"sw2-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/3",
+ "sw-sys-name": "n9kv-ipv6-2",
"sw-serial-number": "9ITWBH9OIAH"
},
"nvPairs": {
@@ -491,6 +624,7 @@
"sw1-info": {
"fabric-name": "mmudigon-ipv6-underlay",
"if-name": "Ethernet1/3",
+ "sw-sys-name": "n9kv-ipv6-1",
"sw-serial-number": "9BH0813WFWT"
},
"fabricName": "mmudigon-ipv6-underlay"
@@ -508,6 +642,7 @@
"sw2-info": {
"fabric-name": "mmudigon",
"if-name": "Ethernet1/4",
+ "sw-sys-name": "n9kv-200",
"sw-serial-number": "9NXHSNTEO6C"
},
"link-dbid": 1862750,
@@ -535,6 +670,7 @@
"sw1-info": {
"fabric-name": "mmudigon",
"if-name": "Ethernet1/4",
+ "sw-sys-name": "n9kv-100",
"sw-serial-number": "9M99N34RDED"
},
"fabricName": "mmudigon"
@@ -552,12 +688,14 @@
"templateName": "ext_fabric_setup",
"sw2-info": {
"if-name": "Ethernet1/3",
+ "sw-sys-name": "test22",
"sw-serial-number": "9E15XSEM5MS",
"fabric-name": "test_net"
},
"sw1-info": {
"fabric-name": "mmudigon-numbered",
"if-name": "Ethernet1/3",
+ "sw-sys-name": "n9kv-227",
"sw-serial-number": "953E68OKK1L"
},
"nvPairs": {
diff --git a/tests/unit/modules/dcnm/fixtures/dcnm_vrf.json b/tests/unit/modules/dcnm/fixtures/dcnm_vrf.json
index 74be2ddb7..ab9e4ea77 100644
--- a/tests/unit/modules/dcnm/fixtures/dcnm_vrf.json
+++ b/tests/unit/modules/dcnm/fixtures/dcnm_vrf.json
@@ -1075,5 +1075,48 @@
"vrfStatus": "DEPLOYED"
}
]
+ },
+ "mock_vrf_lite_obj": {
+ "RETURN_CODE":200,
+ "METHOD":"GET",
+ "MESSAGE":"OK",
+ "DATA": [
+ {
+ "vrfName":"test_vrf",
+ "templateName":"Default_VRF_Extension_Universal",
+ "switchDetailsList":[
+ {
+ "switchName":"poap_test",
+ "vlan":2001,
+ "serialNumber":"9D2DAUJJFQQ",
+ "peerSerialNumber":"None",
+ "extensionValues":"None",
+ "extensionPrototypeValues":[
+ {
+ "interfaceName":"Ethernet1/3",
+ "extensionType":"VRF_LITE",
+ "extensionValues":"{\"PEER_VRF_NAME\": \"\", \"NEIGHBOR_IP\": \"10.33.0.1\", \"VRF_LITE_JYTHON_TEMPLATE\": \"Ext_VRF_Lite_Jython\", \"enableBorderExtension\": \"VRF_LITE\", \"AUTO_VRF_LITE_FLAG\":\"false\", \"IP_MASK\": \"10.33.0.2/30\", \"MTU\": \"9216\", \"NEIGHBOR_ASN\": \"23132\", \"IF_NAME\": \"Ethernet1/3\", \"IPV6_NEIGHBOR\": \"\", \"IPV6_MASK\": \"\", \"DOT1Q_ID\": \"2\", \"asn\": \"52125\"}",
+ "destInterfaceName":"Ethernet1/1",
+ "destSwitchName":"poap-import-static"
+ },
+ {
+ "interfaceName":"Ethernet1/2",
+ "extensionType":"VRF_LITE",
+ "extensionValues":"{\"PEER_VRF_NAME\": \"\", \"NEIGHBOR_IP\": \"20.33.0.1\", \"VRF_LITE_JYTHON_TEMPLATE\": \"Ext_VRF_Lite_Jython\", \"enableBorderExtension\": \"VRF_LITE\", \"AUTO_VRF_LITE_FLAG\": \"false\", \"IP_MASK\": \"20.33.0.2/30\", \"MTU\": \"9216\", \"NEIGHBOR_ASN\": \"23132\", \"IF_NAME\": \"Ethernet1/2\", \"IPV6_NEIGHBOR\": \"\", \"IPV6_MASK\": \"\", \"DOT1Q_ID\": \"2\", \"asn\": \"52125\"}",
+ "destInterfaceName":"Ethernet1/2",
+ "destSwitchName":"poap-import-static"
+ }
+ ],
+ "islanAttached":false,
+ "lanAttachedState":"NA",
+ "errorMessage":"None",
+ "instanceValues":"None",
+ "freeformConfig":"None",
+ "role":"border gateway",
+ "vlanModifiable":true
+ }
+ ]
+ }
+ ]
}
}
diff --git a/tests/unit/modules/dcnm/test_dcnm_links.py b/tests/unit/modules/dcnm/test_dcnm_links.py
index 1d3cd2bf5..46099b20e 100644
--- a/tests/unit/modules/dcnm/test_dcnm_links.py
+++ b/tests/unit/modules/dcnm/test_dcnm_links.py
@@ -111,6 +111,134 @@ def load_links_fixtures(self):
):
self.run_dcnm_fabric_info.side_effect = [self.mock_ipv6_fab_info]
+ # -------------------------- INTER-MISC --------------------------------------
+
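+        # Each side_effect list below feeds dcnm_send() in call order: the fabric
+        # monitor-mode ("readonly") check responses are consumed first, followed by the
+        # existing-links query, merge, deploy and config-preview responses as applicable.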
+ if "test_dcnm_inter_links_src_fab_ro" in self._testMethodName:
+ self.run_dcnm_send.side_effect = [self.mock_monitor_true_resp, self.mock_monitor_true_resp]
+
+ if "test_dcnm_inter_links_dst_fab_ro_dst_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_true_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_ro_src_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_true_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_ro_src_dst_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_true_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_ro_src_dst_sw_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_true_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_rw_dst_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_rw_src_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_rw_src_dst_sw_non_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ config_preview_resp]
+
+ if "test_dcnm_inter_links_dst_fab_rw_src_dst_sw_mgbl" in self._testMethodName:
+
+ merge_links_resp = self.payloads_data.get(
+ "merge_links_fabric_response"
+ )
+ deploy_resp = self.payloads_data.get("deploy_resp")
+ config_preview_resp = self.payloads_data.get("config_preview_resp")
+
+ self.run_dcnm_send.side_effect = [self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ [],
+ merge_links_resp,
+ deploy_resp,
+ deploy_resp,
+ config_preview_resp,
+ config_preview_resp,
+ config_preview_resp]
+
# -------------------------- INTRA-FABRIC-UNNUMBERED --------------------------
if (
@@ -125,6 +253,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
merge_links_resp,
@@ -145,6 +274,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
merge_links_resp,
@@ -163,6 +293,7 @@ def load_links_fixtures(self):
)
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
merge_links_resp,
@@ -187,6 +318,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
deploy_resp,
@@ -205,6 +337,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
merge_links_resp,
@@ -235,6 +368,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
merge_links_resp,
@@ -260,6 +394,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
merge_links_resp,
@@ -285,6 +420,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
merge_links_resp,
@@ -310,6 +446,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
[],
@@ -336,6 +473,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
[],
[],
@@ -351,7 +489,9 @@ def load_links_fixtures(self):
== self._testMethodName
):
- self.run_dcnm_send.side_effect = [[], [], [], [], []]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ [], [], [], [], []]
if (
"test_dcnm_intra_links_unnumbered_template_change"
@@ -368,6 +508,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -379,7 +520,9 @@ def load_links_fixtures(self):
query_links_resp = self.payloads_data.get(
"intra_query_links_unnum_fabric_response"
)
- self.run_dcnm_send.side_effect = [query_links_resp]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ query_links_resp]
# -------------------------- INTRA-FABRIC-IPV6 ----------------------------------
@@ -395,6 +538,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -414,6 +558,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -445,6 +590,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -464,6 +610,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -494,6 +641,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -523,6 +671,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -553,6 +702,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -583,6 +733,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -614,6 +765,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -630,14 +782,18 @@ def load_links_fixtures(self):
== self._testMethodName
):
- self.run_dcnm_send.side_effect = [[], [], [], [], []]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ [], [], [], [], []]
if "test_dcnm_intra_links_ipv6_query" in self._testMethodName:
query_links_resp = self.payloads_data.get(
"intra_query_links_ipv6_fabric_response"
)
- self.run_dcnm_send.side_effect = [query_links_resp]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ query_links_resp]
# -------------------------- INTRA-FABRIC-NUMBERED --------------------------
@@ -653,6 +809,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -672,6 +829,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -703,6 +861,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -722,6 +881,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -751,6 +911,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -780,6 +941,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -810,6 +972,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -840,6 +1003,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -871,6 +1035,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -887,7 +1052,9 @@ def load_links_fixtures(self):
== self._testMethodName
):
- self.run_dcnm_send.side_effect = [[], [], [], [], []]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ [], [], [], [], []]
if (
"test_dcnm_intra_links_numbered_template_change"
@@ -904,6 +1071,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -915,7 +1083,9 @@ def load_links_fixtures(self):
query_links_resp = self.payloads_data.get(
"intra_query_links_num_fabric_response"
)
- self.run_dcnm_send.side_effect = [query_links_resp]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ query_links_resp]
# ------------------------------ INTRA-FABRIC-VPC ---------------------------
@@ -931,6 +1101,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
merge_links_resp,
deploy_resp,
@@ -946,6 +1117,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
merge_links_resp,
deploy_resp,
@@ -964,6 +1136,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
deploy_resp,
config_preview_resp,
@@ -981,6 +1154,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
[],
merge_links_resp,
deploy_resp,
@@ -1001,6 +1175,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -1019,6 +1194,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -1040,6 +1216,7 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -1058,10 +1235,11 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
- have_links_resp1,
+ self.mock_monitor_false_resp,
[],
[],
[],
+ have_links_resp1,
[],
delete_links_resp,
deploy_resp,
@@ -1083,10 +1261,11 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
- have_links_resp1,
+ self.mock_monitor_false_resp,
[],
[],
[],
+ have_links_resp1,
[],
delete_links_resp,
deploy_resp,
@@ -1098,14 +1277,18 @@ def load_links_fixtures(self):
== self._testMethodName
):
- self.run_dcnm_send.side_effect = [[], [], [], [], []]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ [], [], [], [], []]
if "test_dcnm_intra_links_vpc_query" in self._testMethodName:
query_links_resp = self.payloads_data.get(
"intra_query_links_vpc_response"
)
- self.run_dcnm_send.side_effect = [query_links_resp]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ query_links_resp]
# -------------------------- INTER-FABRIC-NUMBERED --------------------------
@@ -1121,6 +1304,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -1150,6 +1336,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -1200,6 +1389,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1226,6 +1418,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
[],
[],
[],
@@ -1271,6 +1466,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1318,6 +1516,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1368,6 +1569,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1418,6 +1622,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1464,6 +1671,9 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
have_links_resp2,
have_links_resp3,
@@ -1487,7 +1697,11 @@ def load_links_fixtures(self):
== self._testMethodName
):
- self.run_dcnm_send.side_effect = [[], [], [], [], [], []]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ [], [], [], [], [], []]
if (
"test_dcnm_inter_links_numbered_template_change"
@@ -1504,6 +1718,8 @@ def load_links_fixtures(self):
config_preview_resp = self.payloads_data.get("config_preview_resp")
self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
have_links_resp1,
merge_links_resp,
deploy_resp,
@@ -1512,12 +1728,23 @@ def load_links_fixtures(self):
config_preview_resp,
]
- if "test_dcnm_inter_links_numbered_query" in self._testMethodName:
+ if "test_dcnm_inter_links_numbered_query_no_config" in self._testMethodName:
query_links_resp = self.payloads_data.get(
"inter_query_links_num_fabric_response"
)
- self.run_dcnm_send.side_effect = [query_links_resp]
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ query_links_resp]
+ elif "test_dcnm_inter_links_numbered_query" in self._testMethodName:
+
+ query_links_resp = self.payloads_data.get(
+ "inter_query_links_num_fabric_response"
+ )
+ self.run_dcnm_send.side_effect = [
+ self.mock_monitor_false_resp,
+ self.mock_monitor_false_resp,
+ query_links_resp]
def load_fixtures(self, response=None, device=""):
@@ -1549,6 +1776,8 @@ def test_dcnm_intra_links_numbered_merged_new_no_opts(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1583,6 +1812,8 @@ def test_dcnm_intra_links_numbered_merged_new(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1617,6 +1848,8 @@ def test_dcnm_intra_links_numbered_merged_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1651,6 +1884,8 @@ def test_dcnm_intra_links_numbered_merged_new_no_state(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1685,6 +1920,8 @@ def test_dcnm_intra_links_numbered_merged_new_check_mode(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1722,6 +1959,8 @@ def test_dcnm_intra_links_numbered_merged_new_existing_and_non_existing(
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1757,6 +1996,8 @@ def test_dcnm_intra_links_numbered_modify_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1792,6 +2033,8 @@ def test_dcnm_intra_links_numbered_replace_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1827,6 +2070,8 @@ def test_dcnm_intra_links_numbered_delete_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1862,6 +2107,8 @@ def test_dcnm_intra_links_numbered_delete_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1897,6 +2144,8 @@ def test_dcnm_intra_links_numbered_delete_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1932,6 +2181,8 @@ def test_dcnm_intra_links_numbered_template_change(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1967,6 +2218,8 @@ def test_dcnm_intra_links_numbered_query_no_config(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -1998,6 +2251,8 @@ def test_dcnm_intra_links_numbered_query_with_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2029,6 +2284,8 @@ def test_dcnm_intra_links_numbered_query_with_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2060,6 +2317,8 @@ def test_dcnm_intra_links_numbered_query_with_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2091,6 +2350,8 @@ def test_dcnm_intra_links_numbered_query_with_src_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2122,6 +2383,8 @@ def test_dcnm_intra_links_numbered_query_with_dst_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2151,6 +2414,8 @@ def test_dcnm_intra_links_numbered_query(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2186,6 +2451,8 @@ def test_dcnm_intra_links_unnumbered_merged_new_no_opts(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_umnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2222,6 +2489,8 @@ def test_dcnm_intra_links_unnumbered_merged_new(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2258,6 +2527,8 @@ def test_dcnm_intra_links_unnumbered_merged_new_no_deploy(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2293,6 +2564,8 @@ def test_dcnm_intra_links_unnumbered_merged_existing(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2329,6 +2602,8 @@ def test_dcnm_intra_links_unnumbered_merged_new_no_state(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2365,6 +2640,8 @@ def test_dcnm_intra_links_unnumbered_merged_new_check_mode(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2404,6 +2681,8 @@ def test_dcnm_intra_links_unnumbered_merged_new_existing_and_non_existing(
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2443,6 +2722,8 @@ def test_dcnm_intra_links_unnumbered_modify_existing(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2482,6 +2763,8 @@ def test_dcnm_intra_links_unnumbered_replace_existing(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2521,6 +2804,8 @@ def test_dcnm_intra_links_unnumbered_delete_existing(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2562,6 +2847,8 @@ def test_dcnm_intra_links_unnumbered_delete_existing_and_non_existing(
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2601,6 +2888,8 @@ def test_dcnm_intra_links_unnumbered_delete_non_existing(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2638,6 +2927,8 @@ def test_dcnm_intra_links_unnumbered_template_change(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2677,6 +2968,8 @@ def test_dcnm_intra_links_unnumbered_query_not_exist(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2708,6 +3001,8 @@ def test_dcnm_intra_links_unnumbered_query_no_config(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2741,6 +3036,8 @@ def test_dcnm_intra_links_unnumbered_query_with_dst_fabric(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2774,6 +3071,8 @@ def test_dcnm_intra_links_unnumbered_query_with_src_device(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2807,6 +3106,8 @@ def test_dcnm_intra_links_unnumbered_query_with_dst_device(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2840,6 +3141,8 @@ def test_dcnm_intra_links_unnumbered_query_with_src_interface(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2873,6 +3176,8 @@ def test_dcnm_intra_links_unnumbered_query_with_dst_interface(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2904,6 +3209,8 @@ def test_dcnm_intra_links_unnumbered_query(self):
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2937,6 +3244,8 @@ def test_dcnm_intra_links_ipv6_merged_new_no_opts(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -2971,6 +3280,8 @@ def test_dcnm_intra_links_ipv6_merged_new(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3005,6 +3316,8 @@ def test_dcnm_intra_links_ipv6_merged_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3039,6 +3352,8 @@ def test_dcnm_intra_links_ipv6_merged_new_no_state(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3073,6 +3388,8 @@ def test_dcnm_intra_links_ipv6_merged_new_check_mode(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3108,6 +3425,8 @@ def test_dcnm_intra_links_ipv6_merged_new_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3143,6 +3462,8 @@ def test_dcnm_intra_links_ipv6_modify_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3180,6 +3501,8 @@ def test_dcnm_intra_links_ipv6_replace_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3215,6 +3538,8 @@ def test_dcnm_intra_links_ipv6_delete_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3250,6 +3575,8 @@ def test_dcnm_intra_links_ipv6_delete_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3285,6 +3612,8 @@ def test_dcnm_intra_links_ipv6_delete_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3318,6 +3647,8 @@ def test_dcnm_intra_links_ipv6_query_no_config(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3349,6 +3680,8 @@ def test_dcnm_intra_links_ipv6_query_with_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3380,6 +3713,8 @@ def test_dcnm_intra_links_ipv6_query_with_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3411,6 +3746,8 @@ def test_dcnm_intra_links_ipv6_query_with_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3442,6 +3779,8 @@ def test_dcnm_intra_links_ipv6_query_with_src_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3473,6 +3812,8 @@ def test_dcnm_intra_links_ipv6_query_with_dst_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3502,6 +3843,8 @@ def test_dcnm_intra_links_ipv6_query(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3533,6 +3876,8 @@ def test_dcnm_intra_links_ipv6_query_not_exist(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3566,6 +3911,8 @@ def test_dcnm_intra_links_vpc_merged_new_no_opts(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3598,6 +3945,8 @@ def test_dcnm_intra_links_vpc_merged_new(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3630,6 +3979,8 @@ def test_dcnm_intra_links_vpc_merged_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3662,6 +4013,8 @@ def test_dcnm_intra_links_vpc_merged_new_no_state(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3694,6 +4047,8 @@ def test_dcnm_intra_links_vpc_merged_new_check_mode(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3727,6 +4082,8 @@ def test_dcnm_intra_links_vpc_merged_new_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3760,6 +4117,8 @@ def test_dcnm_intra_links_vpc_modify_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3793,6 +4152,8 @@ def test_dcnm_intra_links_vpc_replace_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3826,6 +4187,8 @@ def test_dcnm_intra_links_vpc_delete_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3859,6 +4222,8 @@ def test_dcnm_intra_links_vpc_delete_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3892,6 +4257,8 @@ def test_dcnm_intra_links_vpc_delete_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3925,6 +4292,8 @@ def test_dcnm_intra_links_vpc_query_no_config(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3956,6 +4325,8 @@ def test_dcnm_intra_links_vpc_query_with_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -3987,6 +4358,8 @@ def test_dcnm_intra_links_vpc_query_with_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4018,6 +4391,8 @@ def test_dcnm_intra_links_vpc_query_with_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4049,6 +4424,8 @@ def test_dcnm_intra_links_vpc_query_with_src_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4080,6 +4457,8 @@ def test_dcnm_intra_links_vpc_query_with_dst_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4109,6 +4488,8 @@ def test_dcnm_intra_links_vpc_query(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4146,6 +4527,8 @@ def test_dcnm_inter_links_numbered_merged_new_no_opts(self):
"mock_unnum_fab_data"
)
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4182,6 +4565,8 @@ def test_dcnm_inter_links_numbered_merged_new(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4222,6 +4607,8 @@ def test_dcnm_inter_links_numbered_merged_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4262,6 +4649,8 @@ def test_dcnm_inter_links_numbered_merged_new_no_state(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4302,6 +4691,8 @@ def test_dcnm_inter_links_numbered_merged_new_check_mode(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4345,6 +4736,8 @@ def test_dcnm_inter_links_numbered_merged_new_existing_and_non_existing(
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4386,6 +4779,8 @@ def test_dcnm_inter_links_numbered_modify_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4427,6 +4822,8 @@ def test_dcnm_inter_links_numbered_replace_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4468,6 +4865,8 @@ def test_dcnm_inter_links_numbered_delete_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4509,6 +4908,8 @@ def test_dcnm_inter_links_numbered_delete_existing_and_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4550,6 +4951,8 @@ def test_dcnm_inter_links_numbered_delete_non_existing(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4589,6 +4992,8 @@ def test_dcnm_inter_links_numbered_template_change(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4629,6 +5034,8 @@ def test_dcnm_inter_links_numbered_query_no_config(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4664,6 +5071,8 @@ def test_dcnm_inter_links_numbered_query_with_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4699,6 +5108,8 @@ def test_dcnm_inter_links_numbered_query_with_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4734,6 +5145,8 @@ def test_dcnm_inter_links_numbered_query_with_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4769,6 +5182,8 @@ def test_dcnm_inter_links_numbered_query_with_src_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4804,6 +5219,8 @@ def test_dcnm_inter_links_numbered_query_with_dst_interface(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4837,6 +5254,8 @@ def test_dcnm_inter_links_numbered_query(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4872,6 +5291,8 @@ def test_dcnm_inter_links_numbered_query_not_exist(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_unnum_fab_info = self.payloads_data.get(
"mock_unnum_fab_data"
)
@@ -4909,6 +5330,8 @@ def test_dcnm_intra_links_invalid_template(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -4940,6 +5363,8 @@ def test_dcnm_intra_links_missing_src_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(dict(state="merged", config=self.playbook_config))
@@ -4967,6 +5392,8 @@ def test_dcnm_intra_links_missing_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5000,6 +5427,8 @@ def test_dcnm_intra_links_missing_src_intf(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5034,6 +5463,8 @@ def test_dcnm_intra_links_missing_dst_intf(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5068,6 +5499,8 @@ def test_dcnm_intra_links_missing_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5101,6 +5534,8 @@ def test_dcnm_intra_links_missing_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5134,6 +5569,8 @@ def test_dcnm_intra_links_missing_template(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5167,6 +5604,8 @@ def test_dcnm_intra_links_missing_peer1_ipv6(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
set_module_args(
@@ -5202,6 +5641,8 @@ def test_dcnm_intra_links_missing_peer2_ipv6(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
set_module_args(
@@ -5237,6 +5678,8 @@ def test_dcnm_intra_links_missing_peer1_ipv4(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5271,6 +5714,8 @@ def test_dcnm_intra_links_missing_peer2_ipv4(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5303,6 +5748,8 @@ def test_dcnm_intra_links_missing_mtu(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5336,6 +5783,8 @@ def test_dcnm_intra_links_missing_admin_state(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5371,6 +5820,8 @@ def test_dcnm_inter_links_invalid_template(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5402,6 +5853,8 @@ def test_dcnm_inter_links_missing_src_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(dict(state="merged", config=self.playbook_config))
@@ -5429,6 +5882,8 @@ def test_dcnm_inter_links_missing_dst_fabric(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5462,6 +5917,8 @@ def test_dcnm_inter_links_missing_src_intf(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5496,6 +5953,8 @@ def test_dcnm_inter_links_missing_dst_intf(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5530,6 +5989,8 @@ def test_dcnm_inter_links_missing_src_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5563,6 +6024,8 @@ def test_dcnm_inter_links_missing_dst_device(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5596,6 +6059,8 @@ def test_dcnm_inter_links_missing_template(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5629,6 +6094,8 @@ def test_dcnm_inter_links_missing_ipv4_subnet(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
set_module_args(
@@ -5663,6 +6130,8 @@ def test_dcnm_inter_links_missing_neighbor_ip(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
self.mock_ipv6_fab_info = self.payloads_data.get("mock_ipv6_fab_data")
set_module_args(
@@ -5697,6 +6166,8 @@ def test_dcnm_inter_links_missing_src_asn(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5730,6 +6201,8 @@ def test_dcnm_inter_links_missing_dst_asn(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5763,6 +6236,8 @@ def test_dcnm_inter_links_missing_ipv4_addr(self):
self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
set_module_args(
dict(
@@ -5781,3 +6256,307 @@ def test_dcnm_inter_links_missing_ipv4_addr(self):
self.assertEqual(
("ipv4_addr : Required parameter not found" in str(e)), True
)
+
+ # ---------------------- INTER-FABRIC MISC ----------------------------
+
+ def test_dcnm_inter_links_src_fab_ro(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_src_fab_ro_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-src-fab-ro",
+ config=self.playbook_config,
+ )
+ )
+
+ result = None
+
+ try:
+ result = self.execute_module(changed=False, failed=False)
+ except Exception as e:
+ self.assertEqual(result, None)
+ self.assertEqual(
+ ("is in Monitoring mode" in str(e)), True
+ )
+ self.assertEqual(
+ ("No changes are allowed on the fabric" in str(e)), True
+ )
+
+ def test_dcnm_inter_links_dst_fab_ro_dst_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_ro_dst_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 1)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-ro"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_ro_src_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_ro_src_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-ro"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_ro_src_dst_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_ro_src_dst_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-ro"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_ro_src_dst_sw_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_ro_src_dst_sw_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 1)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-ro"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_rw_dst_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_rw_dst_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 1)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-rw"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_rw_src_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_rw_src_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-rw"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_rw_src_dst_sw_non_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_rw_src_dst_sw_non_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-rw"]), 0)
+
+ def test_dcnm_inter_links_dst_fab_rw_src_dst_sw_mgbl(self):
+
+ # load the json from playbooks
+ self.config_data = loadPlaybookData("dcnm_links_configs")
+ self.payloads_data = loadPlaybookData("dcnm_links_payloads")
+
+ # load required config data
+ self.playbook_config = self.config_data.get(
+ "inter_dst_fab_rw_src_dst_sw_mgbl_config"
+ )
+ self.mock_ip_sn = self.payloads_data.get("mock_ip_sn")
+ self.mock_hn_sn = self.payloads_data.get("mock_hn_sn")
+ self.mock_fab_inv = self.payloads_data.get("mock_fab_inv_data")
+ self.mock_num_fab_info = self.payloads_data.get("mock_num_fab_data")
+ self.mock_monitor_true_resp = self.payloads_data.get("mock_monitor_true_resp")
+ self.mock_monitor_false_resp = self.payloads_data.get("mock_monitor_false_resp")
+
+ set_module_args(
+ dict(
+ state="merged",
+ src_fabric="mmudigon-numbered",
+ config=self.playbook_config,
+ )
+ )
+
+ result = self.execute_module(changed=True, failed=False)
+
+ self.assertEqual(len(result["diff"][0]["merged"]), 1)
+ self.assertEqual(len(result["diff"][0]["modified"]), 0)
+ self.assertEqual(len(result["diff"][0]["deleted"]), 0)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-numbered"]), 1)
+ self.assertEqual(len(result["diff"][0]["deploy"][0]["mmudigon-dst-fab-rw"]), 1)
diff --git a/tests/unit/modules/dcnm/test_dcnm_vrf.py b/tests/unit/modules/dcnm/test_dcnm_vrf.py
index 289e46aca..d63acbcf2 100644
--- a/tests/unit/modules/dcnm/test_dcnm_vrf.py
+++ b/tests/unit/modules/dcnm/test_dcnm_vrf.py
@@ -127,6 +127,7 @@ def init_data(self):
self.mock_vrf_attach_lite_object = copy.deepcopy(
self.test_data.get("mock_vrf_attach_lite_object")
)
+ self.mock_vrf_lite_obj = copy.deepcopy(self.test_data.get("mock_vrf_lite_obj"))
def setUp(self):
super(TestDcnmVrfModule, self).setUp()
@@ -199,9 +200,11 @@ def load_fixtures(self, response=None, device=""):
]
elif "_merged_lite_new" in self._testMethodName:
+ self.init_data()
self.run_dcnm_send.side_effect = [
self.blank_data,
self.blank_data,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
]
@@ -270,25 +273,27 @@ def load_fixtures(self, response=None, device=""):
self.deploy_success_resp,
]
- elif "_merged_lite_update_vlan" in self._testMethodName:
+ elif "_merged_lite_update" in self._testMethodName:
self.init_data()
self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object]
self.run_dcnm_send.side_effect = [
self.mock_vrf_object,
self.mock_vrf_attach_get_ext_object_merge_att1_only,
self.mock_vrf_attach_get_ext_object_merge_att2_only,
- self.blank_data,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
]
- elif "_merged_lite_update" in self._testMethodName:
+ elif "_merged_lite_vlan_update" in self._testMethodName:
self.init_data()
self.run_dcnm_get_url.side_effect = [self.mock_vrf_attach_object]
self.run_dcnm_send.side_effect = [
self.mock_vrf_object,
self.mock_vrf_attach_get_ext_object_merge_att1_only,
self.mock_vrf_attach_get_ext_object_merge_att2_only,
+ self.blank_data,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
]
@@ -307,6 +312,7 @@ def load_fixtures(self, response=None, device=""):
self.init_data()
self.run_dcnm_send.side_effect = [
self.mock_vrf_object,
+ self.mock_vrf_lite_obj,
self.mock_vrf_attach_object_pending,
self.blank_data,
self.mock_vrf_attach_get_ext_object_merge_att1_only,
@@ -360,6 +366,7 @@ def load_fixtures(self, response=None, device=""):
self.mock_vrf_object,
self.mock_vrf_attach_get_ext_object_merge_att1_only,
self.mock_vrf_attach_get_ext_object_merge_att4_only,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
self.delete_success_resp,
@@ -384,9 +391,11 @@ def load_fixtures(self, response=None, device=""):
]
elif "lite_override_with_additions" in self._testMethodName:
+ self.init_data()
self.run_dcnm_send.side_effect = [
self.blank_data,
self.blank_data,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
]
@@ -406,6 +415,7 @@ def load_fixtures(self, response=None, device=""):
self.mock_vrf_object,
self.mock_vrf_attach_get_ext_object_merge_att1_only,
self.mock_vrf_attach_get_ext_object_merge_att4_only,
+ self.mock_vrf_lite_obj,
self.attach_success_resp,
self.deploy_success_resp,
self.mock_vrf_attach_object_del_not_ready,
@@ -772,7 +782,7 @@ def test_dcnm_vrf_merged_with_update_vlan(self):
self.assertEqual(result["response"][2]["DATA"]["status"], "")
self.assertEqual(result["response"][2]["RETURN_CODE"], self.SUCCESS_RETURN_CODE)
- def test_dcnm_vrf_merged_lite_update_vlan(self):
+ def test_dcnm_vrf_merged_lite_vlan_update(self):
set_module_args(
dict(
state="merged",