diff --git a/plugins/module_utils/constants.py b/plugins/module_utils/constants.py index 5d09e74..ac482a8 100644 --- a/plugins/module_utils/constants.py +++ b/plugins/module_utils/constants.py @@ -156,7 +156,11 @@ "disable", "restart", "delete", + "deleted", "update", + "merged", + "replaced", + "overridden", ) INTERFACE_FLOW_RULES_TYPES_MAPPING = {"port_channel": "PORTCHANNEL", "physical": "PHYSICAL", "l3out_sub_interface": "L3_SUBIF", "l3out_svi": "SVI"} diff --git a/plugins/module_utils/nd.py b/plugins/module_utils/nd.py index cca3ed4..55ac517 100644 --- a/plugins/module_utils/nd.py +++ b/plugins/module_utils/nd.py @@ -18,7 +18,6 @@ from ansible.module_utils.basic import json from ansible.module_utils.basic import env_fallback from ansible.module_utils.six import PY3 -from ansible.module_utils.six.moves import filterfalse from ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils._text import to_native, to_text from ansible.module_utils.connection import Connection @@ -73,53 +72,27 @@ def cmp(a, b): def issubset(subset, superset): - """Recurse through nested dictionary and compare entries""" + """Recurse through a nested dictionary and check if it is a subset of another.""" - # Both objects are the same object - if subset is superset: - return True - - # Both objects are identical - if subset == superset: - return True - - # Both objects have a different type - if isinstance(subset) is not isinstance(superset): + if type(subset) is not type(superset): return False + if not isinstance(subset, dict): + if isinstance(subset, list): + return all(item in superset for item in subset) + return subset == superset + for key, value in subset.items(): - # Ignore empty values if value is None: - return True + continue - # Item from subset is missing from superset if key not in superset: return False - # Item has different types in subset and superset - if isinstance(superset.get(key)) is not isinstance(value): - return False + superset_value 
= superset.get(key) - # Compare if item values are subset - if isinstance(value, dict): - if not issubset(superset.get(key), value): - return False - elif isinstance(value, list): - try: - # NOTE: Fails for lists of dicts - if not set(value) <= set(superset.get(key)): - return False - except TypeError: - # Fall back to exact comparison for lists of dicts - diff = list(filterfalse(lambda i: i in value, superset.get(key))) + list(filterfalse(lambda j: j in superset.get(key), value)) - if diff: - return False - elif isinstance(value, set): - if not value <= superset.get(key): - return False - else: - if not value == superset.get(key): - return False + if not issubset(value, superset_value): + return False return True @@ -210,6 +183,9 @@ def __init__(self, module): # info output self.previous = dict() + self.before = [] + self.commands = [] + self.after = [] self.proposed = dict() self.sent = dict() self.stdout = None @@ -433,6 +409,7 @@ def exit_json(self, **kwargs): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if self.params.get("output_level") in ("debug", "info"): self.result["previous"] = self.previous + self.result["before"] = self.before # FIXME: Modified header only works for PATCH if not self.has_modified and self.previous != self.existing: self.result["changed"] = True @@ -450,8 +427,10 @@ def exit_json(self, **kwargs): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent self.result["proposed"] = self.proposed + self.result["commands"] = self.commands self.result["current"] = self.existing + self.result["after"] = self.after if self.module._diff and self.result.get("changed") is True: self.result["diff"] = dict( @@ -468,6 +447,7 @@ def fail_json(self, msg, **kwargs): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: if self.params.get("output_level") in ("debug", "info"): self.result["previous"] = self.previous + self.result["before"] = self.before # 
FIXME: Modified header only works for PATCH if not self.has_modified and self.previous != self.existing: self.result["changed"] = True @@ -486,8 +466,10 @@ def fail_json(self, msg, **kwargs): if self.params.get("state") in ALLOWED_STATES_TO_APPEND_SENT_AND_PROPOSED: self.result["sent"] = self.sent self.result["proposed"] = self.proposed + self.result["commands"] = self.commands self.result["current"] = self.existing + self.result["after"] = self.after self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result) @@ -499,16 +481,22 @@ def check_changed(self): existing["password"] = self.sent.get("password") return not issubset(self.sent, existing) - def get_diff(self, unwanted=None): + def get_diff(self, unwanted=None, previous=None, payload=None): """Check if existing payload and sent payload and removing keys that are not required""" if unwanted is None: unwanted = [] - if not self.existing and self.sent: - return True + + if previous is None and payload is None: + if not self.existing and self.sent: + return True existing = self.existing sent = self.sent + if previous and payload: + existing = previous + sent = payload + for key in unwanted: if isinstance(key, str): if key in existing: @@ -516,6 +504,7 @@ def get_diff(self, unwanted=None): del existing[key] except KeyError: pass + if key in sent: try: del sent[key] except KeyError: @@ -524,15 +513,53 @@ def get_diff(self, unwanted=None): key_path, last = key[:-1], key[-1] try: existing_parent = reduce(dict.get, key_path, existing) - del existing_parent[last] + if existing_parent is not None: + del existing_parent[last] except KeyError: pass try: sent_parent = reduce(dict.get, key_path, sent) - del sent_parent[last] + if sent_parent is not None: + del sent_parent[last] except KeyError: pass return not issubset(sent, existing) def set_to_empty_string_when_none(self, val): return val if val is not None else "" + + def get_object_by_nested_key_value(self, path, nested_key_path, value, data_key=None): + + 
response_data = self.request(path, method="GET") + + if not response_data: + return None + + object_list = [] + if isinstance(response_data, list): + object_list = response_data + elif data_key and data_key in response_data: + object_list = response_data.get(data_key) + else: + return None + + keys = nested_key_path.split(".") + + for obj in object_list: + current_level = obj + for key in keys: + if isinstance(current_level, dict): + current_level = current_level.get(key) + else: + current_level = None + break + + if current_level == value: + return obj + + return None + + def delete(self, check_mode, path): + if not check_mode: + self.request(path, method="DELETE") + return {"path": path, "method": "DELETE"} diff --git a/plugins/module_utils/utils.py b/plugins/module_utils/utils.py new file mode 100644 index 0000000..df6b357 --- /dev/null +++ b/plugins/module_utils/utils.py @@ -0,0 +1,67 @@ +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Sabari Jaganathan (@sajagana) +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + + +def snake_to_camel(snake_str, upper_case_components=None): + if snake_str is not None and "_" in snake_str: + if upper_case_components is None: + upper_case_components = [] + components = snake_str.split("_") + camel_case_str = components[0] + + for component in components[1:]: + if component in upper_case_components: + camel_case_str += component.upper() + else: + camel_case_str += component.title() + + return camel_case_str + else: + return snake_str + + +def compare_config_and_remote_objects(remote_objects, config_objects, key="name"): + remote_object_names = {obj[key] for obj in remote_objects} + config_object_names = {obj[key] for obj in config_objects} + + # Common objects from Config (name in both remote and config data) + update = [obj for obj in config_objects if obj[key] in remote_object_names] + + # 
Unmatched objects from Remote (name not in Config) + delete = [obj for obj in remote_objects if obj[key] not in config_object_names] + + # Unmatched objects from Config (name not in Remote) + create = [obj for obj in config_objects if obj[key] not in remote_object_names] + + return { + "config_data_update": update, + "remote_data_delete": delete, # Only when state is overridden + "config_data_create": create, + } + + +def compare_unordered_list_of_dicts(list1, list2): + if (not isinstance(list1, list) or not isinstance(list2, list)) or (len(list1) != len(list2)): + return False + + for dict1 in list1: + found_match = False + for i, dict2 in enumerate(list2): + if dict1 == dict2: + list2.pop(i) + found_match = True + break + if not found_match: + return False + + return True + + +def wrap_objects_by_key(object_list, key="name"): + return {obj.get(key): obj for obj in object_list} diff --git a/plugins/modules/nd_backup_schedule.py b/plugins/modules/nd_backup_schedule.py new file mode 100644 index 0000000..a5f1421 --- /dev/null +++ b/plugins/modules/nd_backup_schedule.py @@ -0,0 +1,294 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# Copyright: (c) 2025, Sabari Jaganathan (@sajagana) +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +from __future__ import absolute_import, division, print_function + +__metaclass__ = type + +ANSIBLE_METADATA = {"metadata_version": "1.1", "status": ["preview"], "supported_by": "community"} + +DOCUMENTATION = r""" +--- +module: nd_backup_schedule +version_added: "0.5.0" +short_description: Manages backup schedules on Cisco Nexus Dashboard. +description: +- Manage backup schedules on Cisco Nexus Dashboard. +- This module is only supported on ND v4.1 and later. +author: +- Sabari Jaganathan (@sajagana) +options: + config: + description: + - The configuration of the backup schedules. + - Use O(state=overridden) and O(config=[]) to delete all backup schedules. 
+ type: list + elements: dict + suboptions: + name: + description: + - The name of the backup schedule. + type: str + required: true + encryption_key: + description: + - The encryption key for a backup file. + - This parameter is required when creating or updating the backup schedule. + type: str + remote_location: + description: + - The name of the remote storage location. + - This parameter is required when creating the backup schedule. + type: str + frequency: + description: + - The frequency at which remote backups are scheduled to occur at specified intervals on selected days. + - This parameter is required when creating the backup schedule. + type: int + scheduler_date: + description: + - The start date for the backup schedule in the format O(config.scheduler_date="YYYY-MM-DD"). + - This parameter is required when creating the backup schedule. + type: str + aliases: [ scheduler_start_date, start_date, date ] + scheduler_time: + description: + - The start time for the backup schedule in the format O(config.scheduler_time="HH:MM:SS"). + - This parameter is required when creating the backup schedule. + type: str + aliases: [ scheduler_start_time, start_time, time ] + backup_type: + description: + - This parameter specifies the kind of snapshot created for the Nexus Dashboard. + - The O(config.backup_type=config_only) option creates a snapshot that specifically captures the configuration settings of the Nexus Dashboard. + - The O(config.backup_type=full) option creates a complete snapshot of the entire Nexus Dashboard. + - This parameter is required when creating the backup schedule. + type: str + choices: [ config_only, full ] + aliases: [ type ] + state: + description: + - Use O(state=merged) or O(state=present) to create new objects or update existing objects on the Cisco Nexus Dashboard based on the provided config. + - Use O(state=replaced) to create or recreate objects on the Cisco Nexus Dashboard, replacing them with those defined in the config.
+ - Use O(state=overridden) to keep only the config-specified objects on the Cisco Nexus Dashboard, deleting all others. + - Use C(query) to retrieve and list the current objects on the Cisco Nexus Dashboard. + - Use O(state=deleted) or O(state=absent) to remove objects from the Cisco Nexus Dashboard. + type: str + choices: [ merged, present, replaced, deleted, absent, overridden, query ] + default: merged +extends_documentation_fragment: +- cisco.nd.modules +- cisco.nd.check_mode +""" + +EXAMPLES = r""" +- name: Create a list of backup schedules + cisco.nd.nd_backup_schedule: + state: merged + config: + - name: daily + encryption_key: testtest1 + frequency: 1 + scheduler_date: "2025-01-02" + scheduler_time: "11:04:05" + remote_location: test + backup_type: config_only + - name: weekly + encryption_key: testtest1 + frequency: 7 + scheduler_date: "2025-01-02" + scheduler_time: "12:04:05" + remote_location: test + backup_type: config_only + +- name: Update a list of backup schedules + cisco.nd.nd_backup_schedule: + state: merged + config: + - name: daily + encryption_key: testtest1 + frequency: 2 + scheduler_date: "2025-01-02" + scheduler_time: "11:10:05" + remote_location: test + backup_type: full + - name: weekly + encryption_key: testtest1 + frequency: 5 + scheduler_date: "2025-01-02" + scheduler_time: "12:10:05" + remote_location: test + backup_type: full + +- name: Query one backup schedule + cisco.nd.nd_backup_schedule: + output_level: debug + state: query + config: + - name: monthly + register: query_one + +- name: Query all backup schedules + cisco.nd.nd_backup_schedule: + output_level: debug + state: query + register: query_all + +- name: Delete all backup schedules + cisco.nd.nd_backup_schedule: + output_level: debug + state: overridden + config: [] +""" + +RETURN = r""" +""" + + +import datetime +from ansible.module_utils.basic import AnsibleModule +from ansible_collections.cisco.nd.plugins.module_utils.nd import NDModule, nd_argument_spec +from 
ansible_collections.cisco.nd.plugins.module_utils.utils import ( + snake_to_camel, + compare_config_and_remote_objects, + compare_unordered_list_of_dicts, + wrap_objects_by_key, +) +import copy + + +def main(): + argument_spec = nd_argument_spec() + argument_spec.update( + state=dict( + type="str", + default="merged", + choices=["merged", "replaced", "deleted", "overridden", "query", "present", "absent"], + ), + config=dict( + required=False, + type="list", + elements="dict", + options=dict( + name=dict(type="str", required=True), + encryption_key=dict(type="str", no_log=True), + remote_location=dict(type="str"), + frequency=dict(type="int"), + scheduler_date=dict(type="str", aliases=["scheduler_start_date", "start_date", "date"]), + scheduler_time=dict(type="str", aliases=["scheduler_start_time", "start_time", "time"]), + backup_type=dict(type="str", choices=["config_only", "full"], aliases=["type"]), + ), + ), + ) + + module = AnsibleModule( + argument_spec=argument_spec, + supports_check_mode=True, + ) + + nd = NDModule(module) + + base_path = "/api/v1/infra/backups/schedules" + + config = nd.params.get("config") + state = nd.params.get("state") + schedules = nd.request(base_path, method="GET").get("schedules") + + remote_schedule_map = wrap_objects_by_key(schedules) + + if state == "query": + if config: + for object in config: + if remote_schedule_map.get(object.get("name")): + nd.after.append(remote_schedule_map.get(object.get("name"))) + else: + nd.after = schedules + nd.exit_json() + + nd.before = copy.deepcopy(schedules) + + result = compare_config_and_remote_objects(schedules, config) + + if state != "query": + if state in ["deleted", "absent"]: # Delete only specified objects in the config + delete_object_list = config + nd.after = result.get("remote_data_delete") # Unmatched objects from ND (name not in Config) + else: + delete_object_list = result.get("remote_data_delete") + + for object in delete_object_list: + if state in ["overridden", "deleted", 
"absent"]: + nd.commands.append(nd.delete(module.check_mode, "{0}/{1}".format(base_path, object.get("name")))) + else: # Ignore delete operation when the state is merged/present, replaced + nd.after.append(object) + + if state not in ["deleted", "absent"]: + for object in result.get("config_data_create"): + post_backup_schedule_config(nd, module, base_path, object) + + for object in result.get("config_data_update"): + if remote_schedule_map.get(object.get("name")): + if state in ["replaced", "overridden"]: # Force recreate the object when state is replaced/overridden + post_backup_schedule_config(nd, module, base_path, object, None, method="PUT") + else: # Use the existing object value when the optional attribute is None when the state is merged/present + post_backup_schedule_config(nd, module, base_path, object, remote_schedule_map.get(object.get("name")), method="PUT") + + if not compare_unordered_list_of_dicts(nd.after, copy.deepcopy(nd.before)): + nd.result["changed"] = True + + nd.exit_json() + + +def post_backup_schedule_config(nd, module, path, config_obj, remote_obj=None, method="POST"): + start_time = get_backup_schedule_time(config_obj.get("scheduler_date"), config_obj.get("scheduler_time")) + payload = { + "encryptionKey": config_obj.get("encryption_key"), + "name": config_obj.get("name"), + "type": snake_to_camel(config_obj.get("backup_type")), + "frequency": config_obj.get("frequency"), + "remoteLocation": config_obj.get("remote_location"), + "startTime": start_time, + } + + changed = None + if method == "PUT": + path = "{0}/{1}".format(path, payload.get("name")) + if remote_obj: + payload["frequency"] = payload["frequency"] or remote_obj.get("frequency") + payload["remoteLocation"] = payload["remoteLocation"] or remote_obj.get("remoteLocation") + payload["startTime"] = payload["startTime"] or remote_obj.get("startTime") + if nd.get_diff(unwanted=["encryptionKey", "user"], previous=copy.deepcopy(remote_obj), payload=copy.deepcopy(payload)): + changed = 
True + + nd.sanitize(payload, collate=True) + nd.commands.append(nd.proposed) + + if not module.check_mode: + if method == "PUT": + if changed or remote_obj is None: + nd.request(path, method=method, data=payload) + nd.after.append(nd.request(path, method="GET")) + elif remote_obj and not changed: # Ignore the PUT call when the object does not have a valid change + nd.after.append(remote_obj) + elif method == "POST": + nd.request(path, method=method, data=payload) + path = "{0}/{1}".format(path, payload.get("name")) + nd.after.append(nd.request(path, method="GET")) + else: + nd.after.append(payload) + + +def get_backup_schedule_time(scheduler_date, scheduler_time): + if scheduler_date and scheduler_time: + date_object = datetime.datetime.strptime(scheduler_date, "%Y-%m-%d") + time_object = datetime.datetime.strptime(scheduler_time, "%H:%M:%S") + return "{:04d}-{:02d}-{:02d}T{:02d}:{:02d}:{:02d}Z".format( + date_object.year, date_object.month, date_object.day, time_object.hour, time_object.minute, time_object.second + ) + + +if __name__ == "__main__": + main() diff --git a/tests/integration/targets/nd_backup_schedule/tasks/main.yml b/tests/integration/targets/nd_backup_schedule/tasks/main.yml new file mode 100644 index 0000000..46bf22e --- /dev/null +++ b/tests/integration/targets/nd_backup_schedule/tasks/main.yml @@ -0,0 +1,582 @@ +# Test code for the ND modules +# Copyright: (c) 2025, Sabari Jaganathan (@sajagana) + +# GNU General Public License v3.0+ (see LICENSE or https://www.gnu.org/licenses/gpl-3.0.txt) + +# CLEAN TEST ENVIRONMENT +- name: Delete all backup schedules with empty overridden config + cisco.nd.nd_backup_schedule: + output_level: debug + state: overridden + config: [] + register: delete_all_1 + +# CREATE +- name: Create backup schedules with state merged (check_mode) + cisco.nd.nd_backup_schedule: &cm_add_with_merged + output_level: debug + state: merged + config: + - name: daily + encryption_key: testtest1 + frequency: 1 + scheduler_date: 
"2025-01-02" + scheduler_time: "11:04:05" + remote_location: test + backup_type: config_only + - name: weekly + encryption_key: testtest1 + frequency: 7 + scheduler_date: "2025-01-02" + scheduler_time: "12:04:05" + remote_location: test + backup_type: config_only + check_mode: true + register: cm_add_with_merged + +- name: Create backup schedules with state merged + cisco.nd.nd_backup_schedule: + <<: *cm_add_with_merged + register: add_with_merged + +- name: Create backup schedules with state merged again + cisco.nd.nd_backup_schedule: + <<: *cm_add_with_merged + register: add_with_merged_again + +- name: Create monthly backup schedule with state merged - expected to fail + cisco.nd.nd_backup_schedule: + output_level: debug + state: merged + config: + - name: monthly + encryption_key: testtest1 + frequency: 30 + scheduler_date: "2025-01-02" + scheduler_time: "13:04:05" + remote_location: test + backup_type: config_only + ignore_errors: true + register: nt_add_with_merged + +- name: Assertion check for create backup schedules with state merged + ansible.builtin.assert: + that: + - cm_add_with_merged is changed + - cm_add_with_merged.after.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_add_with_merged.after.0.frequency == 1 + - cm_add_with_merged.after.0.name == "daily" + - cm_add_with_merged.after.0.remoteLocation == "test" + - cm_add_with_merged.after.0.startTime == "2025-01-02T11:04:05Z" + - cm_add_with_merged.after.0.type == "configOnly" + - cm_add_with_merged.after.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_add_with_merged.after.1.frequency == 7 + - cm_add_with_merged.after.1.name == "weekly" + - cm_add_with_merged.after.1.remoteLocation == "test" + - cm_add_with_merged.after.1.startTime == "2025-01-02T12:04:05Z" + - cm_add_with_merged.after.1.type == "configOnly" + - cm_add_with_merged.before == [] + - cm_add_with_merged.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - 
cm_add_with_merged.commands.0.frequency == 1 + - cm_add_with_merged.commands.0.name == "daily" + - cm_add_with_merged.commands.0.remoteLocation == "test" + - cm_add_with_merged.commands.0.startTime == "2025-01-02T11:04:05Z" + - cm_add_with_merged.commands.0.type == "configOnly" + - cm_add_with_merged.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_add_with_merged.commands.1.frequency == 7 + - cm_add_with_merged.commands.1.name == "weekly" + - cm_add_with_merged.commands.1.remoteLocation == "test" + - cm_add_with_merged.commands.1.startTime == "2025-01-02T12:04:05Z" + - cm_add_with_merged.commands.1.type == "configOnly" + - add_with_merged is changed + - add_with_merged.after.0.frequency == 1 + - add_with_merged.after.0.name == "daily" + - add_with_merged.after.0.remoteLocation == "test" + - add_with_merged.after.0.startTime == "2025-01-02T11:04:05Z" + - add_with_merged.after.0.type == "configOnly" + - add_with_merged.after.1.frequency == 7 + - add_with_merged.after.1.name == "weekly" + - add_with_merged.after.1.remoteLocation == "test" + - add_with_merged.after.1.startTime == "2025-01-02T12:04:05Z" + - add_with_merged.after.1.type == "configOnly" + - add_with_merged.before == [] + - add_with_merged.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - add_with_merged.commands.0.frequency == 1 + - add_with_merged.commands.0.name == "daily" + - add_with_merged.commands.0.remoteLocation == "test" + - add_with_merged.commands.0.startTime == "2025-01-02T11:04:05Z" + - add_with_merged.commands.0.type == "configOnly" + - add_with_merged.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - add_with_merged.commands.1.frequency == 7 + - add_with_merged.commands.1.name == "weekly" + - add_with_merged.commands.1.remoteLocation == "test" + - add_with_merged.commands.1.startTime == "2025-01-02T12:04:05Z" + - add_with_merged.commands.1.type == "configOnly" + - add_with_merged_again is not changed + - 
add_with_merged_again.after.0.name == add_with_merged_again.before.0.name == "daily" + - add_with_merged_again.after.1.name == add_with_merged_again.before.1.name == "weekly" + - add_with_merged_again.after.0 == add_with_merged_again.before.0 + - add_with_merged_again.after.1 == add_with_merged_again.before.1 + - nt_add_with_merged is failed + - nt_add_with_merged.after.0.name == "daily" + - nt_add_with_merged.after.1.name == "weekly" + - nt_add_with_merged.before.0.name == "daily" + - nt_add_with_merged.before.1.name == "weekly" + - nt_add_with_merged.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - nt_add_with_merged.commands.0.frequency == 30 + - nt_add_with_merged.commands.0.name == "monthly" + - nt_add_with_merged.commands.0.remoteLocation == "test" + - nt_add_with_merged.commands.0.startTime == "2025-01-02T13:04:05Z" + - nt_add_with_merged.commands.0.type == "configOnly" + - nt_add_with_merged.msg == "ND Error 400{{':'}} The maximum number of backup schedules is 2" + +# UPDATE +- name: Update the daily backup schedule with state merged (check_mode) + cisco.nd.nd_backup_schedule: &cm_update_with_merged + output_level: debug + state: merged + config: + - name: daily + encryption_key: testtest1 + scheduler_date: "2025-01-02" + scheduler_time: "11:10:05" + remote_location: test + backup_type: full + check_mode: true + register: cm_update_with_merged + +- name: Update the daily backup schedule with state merged + cisco.nd.nd_backup_schedule: + <<: *cm_update_with_merged + register: update_with_merged + +- name: Update the daily backup schedule with state merged again + cisco.nd.nd_backup_schedule: + <<: *cm_update_with_merged + register: update_with_merged_again + +- name: Assertion check for update the daily backup schedule + ansible.builtin.assert: + that: + - cm_update_with_merged is changed + - cm_update_with_merged.after.0.frequency == 7 + - cm_update_with_merged.after.0.name == "weekly" + - cm_update_with_merged.after.0.remoteLocation == 
"test" + - cm_update_with_merged.after.0.startTime == "2025-01-02T12:04:05Z" + - cm_update_with_merged.after.0.type == "configOnly" + - cm_update_with_merged.before.1.frequency == 7 + - cm_update_with_merged.before.1.name == "weekly" + - cm_update_with_merged.before.1.remoteLocation == "test" + - cm_update_with_merged.before.1.startTime == "2025-01-02T12:04:05Z" + - cm_update_with_merged.before.1.type == "configOnly" + - cm_update_with_merged.after.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_merged.after.1.frequency == 1 + - cm_update_with_merged.after.1.name == "daily" + - cm_update_with_merged.after.1.remoteLocation == "test" + - cm_update_with_merged.after.1.startTime == "2025-01-02T11:10:05Z" + - cm_update_with_merged.after.1.type == "full" + - cm_update_with_merged.before.0.frequency == 1 + - cm_update_with_merged.before.0.name == "daily" + - cm_update_with_merged.before.0.remoteLocation == "test" + - cm_update_with_merged.before.0.startTime == "2025-01-02T11:04:05Z" + - cm_update_with_merged.before.0.type == "configOnly" + - cm_update_with_merged.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_merged.commands.0.frequency == 1 + - cm_update_with_merged.commands.0.name == "daily" + - cm_update_with_merged.commands.0.remoteLocation == "test" + - cm_update_with_merged.commands.0.startTime == "2025-01-02T11:10:05Z" + - cm_update_with_merged.commands.0.type == "full" + - update_with_merged is changed + - update_with_merged.after.0.frequency == 7 + - update_with_merged.after.0.name == "weekly" + - update_with_merged.after.0.remoteLocation == "test" + - update_with_merged.after.0.startTime == "2025-01-02T12:04:05Z" + - update_with_merged.after.0.type == "configOnly" + - update_with_merged.before.1.frequency == 7 + - update_with_merged.before.1.name == "weekly" + - update_with_merged.before.1.remoteLocation == "test" + - update_with_merged.before.1.startTime == "2025-01-02T12:04:05Z" + - 
update_with_merged.before.1.type == "configOnly" + - update_with_merged.after.1.frequency == 1 + - update_with_merged.after.1.name == "daily" + - update_with_merged.after.1.remoteLocation == "test" + - update_with_merged.after.1.startTime == "2025-01-02T11:10:05Z" + - update_with_merged.after.1.type == "full" + - update_with_merged.before.0.frequency == 1 + - update_with_merged.before.0.name == "daily" + - update_with_merged.before.0.remoteLocation == "test" + - update_with_merged.before.0.startTime == "2025-01-02T11:04:05Z" + - update_with_merged.before.0.type == "configOnly" + - update_with_merged.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_merged.commands.0.frequency == 1 + - update_with_merged.commands.0.name == "daily" + - update_with_merged.commands.0.remoteLocation == "test" + - update_with_merged.commands.0.startTime == "2025-01-02T11:10:05Z" + - update_with_merged.commands.0.type == "full" + - update_with_merged_again is not changed + - update_with_merged_again.after.0.frequency == 7 + - update_with_merged_again.after.0.name == "weekly" + - update_with_merged_again.after.0.remoteLocation == "test" + - update_with_merged_again.after.0.startTime == "2025-01-02T12:04:05Z" + - update_with_merged_again.after.0.type == "configOnly" + - update_with_merged_again.after.1.frequency == 1 + - update_with_merged_again.after.1.name == "daily" + - update_with_merged_again.after.1.remoteLocation == "test" + - update_with_merged_again.after.1.startTime == "2025-01-02T11:10:05Z" + - update_with_merged_again.after.1.type == "full" + - update_with_merged_again.before.0.frequency == 1 + - update_with_merged_again.before.0.name == "daily" + - update_with_merged_again.before.0.remoteLocation == "test" + - update_with_merged_again.before.0.startTime == "2025-01-02T11:10:05Z" + - update_with_merged_again.before.0.type == "full" + - update_with_merged_again.before.1.frequency == 7 + - update_with_merged_again.before.1.name == "weekly" + - 
update_with_merged_again.before.1.remoteLocation == "test" + - update_with_merged_again.before.1.startTime == "2025-01-02T12:04:05Z" + - update_with_merged_again.before.1.type == "configOnly" + +- name: Update the backup schedules with state overridden to delete unspecified backup schedules from the remote (check_mode) + cisco.nd.nd_backup_schedule: &cm_update_with_overridden + output_level: debug + state: overridden + config: + - name: daily + encryption_key: testtest1 + frequency: 2 + scheduler_date: "2025-01-02" + scheduler_time: "11:15:05" + remote_location: test + backup_type: config_only + - name: quarterly + encryption_key: testtest1 + frequency: 90 + scheduler_date: "2025-01-02" + scheduler_time: "23:50:05" + remote_location: test + backup_type: config_only + check_mode: true + register: cm_update_with_overridden + +- name: Update the backup schedules with state overridden to delete unspecified backup schedules from the remote + cisco.nd.nd_backup_schedule: + <<: *cm_update_with_overridden + register: update_with_overridden + +- name: Update the backup schedules with state overridden to delete unspecified backup schedules from the remote again + cisco.nd.nd_backup_schedule: + <<: *cm_update_with_overridden + register: update_with_overridden_again + +- name: Assertion check for update the backup schedules with state overridden to delete unspecified backup schedules from the remote + ansible.builtin.assert: + that: + - cm_update_with_overridden is changed + - cm_update_with_overridden.after.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_overridden.after.0.frequency == 90 + - cm_update_with_overridden.after.0.name == "quarterly" + - cm_update_with_overridden.after.0.remoteLocation == "test" + - cm_update_with_overridden.after.0.startTime == "2025-01-02T23:50:05Z" + - cm_update_with_overridden.after.0.type == "configOnly" + - cm_update_with_overridden.after.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - 
cm_update_with_overridden.after.1.frequency == 2 + - cm_update_with_overridden.after.1.name == "daily" + - cm_update_with_overridden.after.1.remoteLocation == "test" + - cm_update_with_overridden.after.1.startTime == "2025-01-02T11:15:05Z" + - cm_update_with_overridden.after.1.type == "configOnly" + - cm_update_with_overridden.before.0.frequency == 1 + - cm_update_with_overridden.before.0.name == "daily" + - cm_update_with_overridden.before.0.remoteLocation == "test" + - cm_update_with_overridden.before.0.startTime == "2025-01-02T11:10:05Z" + - cm_update_with_overridden.before.0.type == "full" + - cm_update_with_overridden.before.1.frequency == 7 + - cm_update_with_overridden.before.1.name == "weekly" + - cm_update_with_overridden.before.1.remoteLocation == "test" + - cm_update_with_overridden.before.1.startTime == "2025-01-02T12:04:05Z" + - cm_update_with_overridden.before.1.type == "configOnly" + - cm_update_with_overridden.commands.0.method == "DELETE" + - cm_update_with_overridden.commands.0.path == "/api/v1/infra/backups/schedules/weekly" + - cm_update_with_overridden.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_overridden.commands.1.frequency == 90 + - cm_update_with_overridden.commands.1.name == "quarterly" + - cm_update_with_overridden.commands.1.remoteLocation == "test" + - cm_update_with_overridden.commands.1.startTime == "2025-01-02T23:50:05Z" + - cm_update_with_overridden.commands.1.type == "configOnly" + - cm_update_with_overridden.commands.2.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_overridden.commands.2.frequency == 2 + - cm_update_with_overridden.commands.2.name == "daily" + - cm_update_with_overridden.commands.2.remoteLocation == "test" + - cm_update_with_overridden.commands.2.startTime == "2025-01-02T11:15:05Z" + - cm_update_with_overridden.commands.2.type == "configOnly" + - update_with_overridden is changed + - update_with_overridden.after.0.frequency == 90 + - 
update_with_overridden.after.0.name == "quarterly" + - update_with_overridden.after.0.remoteLocation == "test" + - update_with_overridden.after.0.startTime == "2025-01-02T23:50:05Z" + - update_with_overridden.after.0.type == "configOnly" + - update_with_overridden.after.1.frequency == 2 + - update_with_overridden.after.1.name == "daily" + - update_with_overridden.after.1.remoteLocation == "test" + - update_with_overridden.after.1.startTime == "2025-01-02T11:15:05Z" + - update_with_overridden.after.1.type == "configOnly" + - update_with_overridden.before.0.frequency == 1 + - update_with_overridden.before.0.name == "daily" + - update_with_overridden.before.0.remoteLocation == "test" + - update_with_overridden.before.0.startTime == "2025-01-02T11:10:05Z" + - update_with_overridden.before.0.type == "full" + - update_with_overridden.before.1.frequency == 7 + - update_with_overridden.before.1.name == "weekly" + - update_with_overridden.before.1.remoteLocation == "test" + - update_with_overridden.before.1.startTime == "2025-01-02T12:04:05Z" + - update_with_overridden.before.1.type == "configOnly" + - update_with_overridden.commands.0.method == "DELETE" + - update_with_overridden.commands.0.path == "/api/v1/infra/backups/schedules/weekly" + - update_with_overridden.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_overridden.commands.1.frequency == 90 + - update_with_overridden.commands.1.name == "quarterly" + - update_with_overridden.commands.1.remoteLocation == "test" + - update_with_overridden.commands.1.startTime == "2025-01-02T23:50:05Z" + - update_with_overridden.commands.1.type == "configOnly" + - update_with_overridden.commands.2.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_overridden.commands.2.frequency == 2 + - update_with_overridden.commands.2.name == "daily" + - update_with_overridden.commands.2.remoteLocation == "test" + - update_with_overridden.commands.2.startTime == "2025-01-02T11:15:05Z" + - 
update_with_overridden.commands.2.type == "configOnly" + - update_with_overridden_again is not changed + - update_with_overridden_again.after.0.name == update_with_overridden_again.before.0.name == "daily" + - update_with_overridden_again.after.1.name == update_with_overridden_again.before.1.name == "quarterly" + - update_with_overridden_again.after.0 == update_with_overridden_again.before.0 + - update_with_overridden_again.after.1 == update_with_overridden_again.before.1 + +- name: Delete the quarterly backup schedule with state deleted (check_mode) + cisco.nd.nd_backup_schedule: &cm_rm_with_deleted + output_level: debug + state: deleted + config: + - name: quarterly + encryption_key: testtest1 + frequency: 90 + scheduler_date: "2025-01-02" + scheduler_time: "23:50:05" + remote_location: test + backup_type: config_only + check_mode: true + register: cm_rm_with_deleted + +- name: Delete the quarterly backup schedule with state deleted + cisco.nd.nd_backup_schedule: + <<: *cm_rm_with_deleted + register: rm_with_deleted + +- name: Delete the quarterly backup schedule with state deleted again + cisco.nd.nd_backup_schedule: + <<: *cm_rm_with_deleted + register: rm_with_deleted_again + +- name: Update the backup schedules with state replaced (check_mode) + cisco.nd.nd_backup_schedule: &cm_update_with_replaced + output_level: debug + state: replaced + config: + - name: daily + encryption_key: testtest1 + frequency: 1 + scheduler_date: "2025-01-02" + scheduler_time: "11:15:05" + remote_location: test + backup_type: config_only + - name: monthly + encryption_key: testtest1 + frequency: 30 + scheduler_date: "2025-01-02" + scheduler_time: "13:04:05" + remote_location: test + backup_type: config_only + check_mode: true + register: cm_update_with_replaced + +- name: Update the backup schedules with state replaced + cisco.nd.nd_backup_schedule: + <<: *cm_update_with_replaced + register: update_with_replaced + +- name: Update the backup schedules with state replaced again + 
cisco.nd.nd_backup_schedule: + <<: *cm_update_with_replaced + register: update_with_replaced_again + +- name: Update one backup schedule with state replaced + cisco.nd.nd_backup_schedule: + output_level: debug + state: replaced + config: + - name: monthly + encryption_key: testtest1 + frequency: 28 + scheduler_date: "2025-01-02" + scheduler_time: "13:04:05" + remote_location: test + backup_type: config_only + register: update_with_replaced_1 + +- name: Assertion check for update the backup schedules with state replaced + ansible.builtin.assert: + that: + - cm_update_with_replaced is changed + - cm_update_with_replaced.after.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_replaced.after.0.frequency == 30 + - cm_update_with_replaced.after.0.name == "monthly" + - cm_update_with_replaced.after.0.remoteLocation == "test" + - cm_update_with_replaced.after.0.startTime == "2025-01-02T13:04:05Z" + - cm_update_with_replaced.after.0.type == "configOnly" + - cm_update_with_replaced.after.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_replaced.after.1.frequency == 1 + - cm_update_with_replaced.after.1.name == "daily" + - cm_update_with_replaced.after.1.remoteLocation == "test" + - cm_update_with_replaced.after.1.startTime == "2025-01-02T11:15:05Z" + - cm_update_with_replaced.after.1.type == "configOnly" + - cm_update_with_replaced.before.0.frequency == 2 + - cm_update_with_replaced.before.0.name == "daily" + - cm_update_with_replaced.before.0.remoteLocation == "test" + - cm_update_with_replaced.before.0.startTime == "2025-01-02T11:15:05Z" + - cm_update_with_replaced.before.0.type == "configOnly" + - cm_update_with_replaced.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_replaced.commands.0.frequency == 30 + - cm_update_with_replaced.commands.0.name == "monthly" + - cm_update_with_replaced.commands.0.remoteLocation == "test" + - cm_update_with_replaced.commands.0.startTime == 
"2025-01-02T13:04:05Z" + - cm_update_with_replaced.commands.0.type == "configOnly" + - cm_update_with_replaced.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - cm_update_with_replaced.commands.1.frequency == 1 + - cm_update_with_replaced.commands.1.name == "daily" + - cm_update_with_replaced.commands.1.remoteLocation == "test" + - cm_update_with_replaced.commands.1.startTime == "2025-01-02T11:15:05Z" + - cm_update_with_replaced.commands.1.type == "configOnly" + - update_with_replaced is changed + - update_with_replaced.after.0.name == "monthly" + - update_with_replaced.after.0.frequency == 30 + - update_with_replaced.after.0.remoteLocation == "test" + - update_with_replaced.after.0.startTime == "2025-01-02T13:04:05Z" + - update_with_replaced.after.0.type == "configOnly" + - update_with_replaced.after.1.frequency == 1 + - update_with_replaced.after.1.name == "daily" + - update_with_replaced.after.1.remoteLocation == "test" + - update_with_replaced.after.1.startTime == "2025-01-02T11:15:05Z" + - update_with_replaced.after.1.type == "configOnly" + - update_with_replaced.before.0.frequency == 2 + - update_with_replaced.before.0.name == "daily" + - update_with_replaced.before.0.remoteLocation == "test" + - update_with_replaced.before.0.startTime == "2025-01-02T11:15:05Z" + - update_with_replaced.before.0.type == "configOnly" + - update_with_replaced.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_replaced.commands.0.frequency == 30 + - update_with_replaced.commands.0.name == "monthly" + - update_with_replaced.commands.0.remoteLocation == "test" + - update_with_replaced.commands.0.startTime == "2025-01-02T13:04:05Z" + - update_with_replaced.commands.0.type == "configOnly" + - update_with_replaced.commands.1.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_replaced.commands.1.frequency == 1 + - update_with_replaced.commands.1.name == "daily" + - update_with_replaced.commands.1.remoteLocation == 
"test" + - update_with_replaced.commands.1.startTime == "2025-01-02T11:15:05Z" + - update_with_replaced.commands.1.type == "configOnly" + - update_with_replaced_again is not changed + - update_with_replaced_again.after.0.name == update_with_replaced_again.before.0.name == "daily" + - update_with_replaced_again.after.0 == update_with_replaced_again.before.0 + - update_with_replaced_again.after.1.name == update_with_replaced_again.before.1.name == "monthly" + - update_with_replaced_again.after.1 == update_with_replaced_again.before.1 + - update_with_replaced_1 is changed + - update_with_replaced_1.after.0.name == update_with_replaced_1.before.0.name == "daily" + - update_with_replaced_1.after.0 == update_with_replaced_1.before.0 + - update_with_replaced_1.after.1.frequency == 28 + - update_with_replaced_1.after.1.name == update_with_replaced_1.before.1.name == "monthly" + - update_with_replaced_1.after.1.remoteLocation == update_with_replaced_1.before.1.remoteLocation == "test" + - update_with_replaced_1.after.1.startTime == update_with_replaced_1.before.1.startTime == "2025-01-02T13:04:05Z" + - update_with_replaced_1.after.1.type == update_with_replaced_1.before.1.type == "configOnly" + - update_with_replaced_1.commands.0.encryptionKey == "VALUE_SPECIFIED_IN_NO_LOG_PARAMETER" + - update_with_replaced_1.commands.0.frequency == 28 + - update_with_replaced_1.commands.0.name == "monthly" + - update_with_replaced_1.commands.0.remoteLocation == "test" + - update_with_replaced_1.commands.0.startTime == "2025-01-02T13:04:05Z" + - update_with_replaced_1.commands.0.type == "configOnly" + +# QUERY +- name: Query one backup schedule + cisco.nd.nd_backup_schedule: + output_level: debug + state: query + config: + - name: monthly + register: query_one + +- name: Query all backup schedules + cisco.nd.nd_backup_schedule: + output_level: debug + state: query + register: query_all + +- name: Assertion check for query backup schedules + ansible.builtin.assert: + that: + - query_one is 
not changed + - query_one.after.0.frequency == 28 + - query_one.after.0.name == "monthly" + - query_one.after.0.remoteLocation == "test" + - query_one.after.0.startTime == "2025-01-02T13:04:05Z" + - query_one.after.0.type == "configOnly" + - query_all is not changed + - query_all.after.0.frequency == 1 + - query_all.after.0.name == "daily" + - query_all.after.0.remoteLocation == "test" + - query_all.after.0.startTime == "2025-01-02T11:15:05Z" + - query_all.after.0.type == "configOnly" + - query_all.after.1.frequency == 28 + - query_all.after.1.name == "monthly" + - query_all.after.1.remoteLocation == "test" + - query_all.after.1.startTime == "2025-01-02T13:04:05Z" + - query_all.after.1.type == "configOnly" + +# DELETE +- name: Delete all backup schedules with empty overridden config + cisco.nd.nd_backup_schedule: + output_level: debug + state: overridden + config: [] + register: delete_all_2 + +- name: Assertion check for delete backup schedules + ansible.builtin.assert: + that: + - cm_rm_with_deleted is changed + - cm_rm_with_deleted.after.0.name == cm_rm_with_deleted.before.0.name == "daily" + - cm_rm_with_deleted.after.0 == cm_rm_with_deleted.before.0 + - cm_rm_with_deleted.before.1.frequency == 90 + - cm_rm_with_deleted.before.1.name == "quarterly" + - cm_rm_with_deleted.before.1.remoteLocation == "test" + - cm_rm_with_deleted.before.1.startTime == "2025-01-02T23:50:05Z" + - cm_rm_with_deleted.before.1.type == "configOnly" + - cm_rm_with_deleted.commands.0.method == "DELETE" + - cm_rm_with_deleted.commands.0.path == "/api/v1/infra/backups/schedules/quarterly" + - rm_with_deleted is changed + - rm_with_deleted.after.0.name == rm_with_deleted.before.0.name == "daily" + - rm_with_deleted.after.0 == rm_with_deleted.before.0 + - rm_with_deleted.before.1.frequency == 90 + - rm_with_deleted.before.1.name == "quarterly" + - rm_with_deleted.before.1.remoteLocation == "test" + - rm_with_deleted.before.1.startTime == "2025-01-02T23:50:05Z" + - 
rm_with_deleted.before.1.type == "configOnly" + - rm_with_deleted.commands.0.method == "DELETE" + - rm_with_deleted.commands.0.path == "/api/v1/infra/backups/schedules/quarterly" + - rm_with_deleted_again is not changed + - rm_with_deleted_again.after.0.name == rm_with_deleted_again.before.0.name == "daily" + - rm_with_deleted_again.after.0 == rm_with_deleted_again.before.0 + # NOTE(review): this run is asserted 'is not changed' yet a DELETE command is still expected below — confirm the module intentionally reports proposed-but-unsent commands on idempotent runs + - rm_with_deleted_again.commands.0.method == "DELETE" + - rm_with_deleted_again.commands.0.path == "/api/v1/infra/backups/schedules/quarterly" + - delete_all_2 is changed + - delete_all_2.after == [] + - delete_all_2.before.0.name == "daily" + - delete_all_2.before.1.name == "monthly" + - delete_all_2.commands.0.method == "DELETE" + - delete_all_2.commands.0.path == "/api/v1/infra/backups/schedules/daily" + - delete_all_2.commands.1.method == "DELETE" + - delete_all_2.commands.1.path == "/api/v1/infra/backups/schedules/monthly"